Data reading and cleaning.

# Load the graduate-admissions dataset (500 applicants, 9 columns) and keep
# character columns as character rather than factors.
# NOTE(review): absolute Windows path — this only runs on the original
# author's machine; consider a relative path or file.choose().
grad_set <- read.csv(file = "C:/Users/arzav/Downloads/Admission_Predict_Ver1.1.csv", header = TRUE, stringsAsFactors=FALSE) 
grad_set
str(grad_set)
'data.frame':   500 obs. of  9 variables:
 $ Serial.No.       : int  1 2 3 4 5 6 7 8 9 10 ...
 $ GRE.Score        : int  337 324 316 322 314 330 321 308 302 323 ...
 $ TOEFL.Score      : int  118 107 104 110 103 115 109 101 102 108 ...
 $ University.Rating: int  4 4 3 3 2 5 3 2 1 3 ...
 $ SOP              : num  4.5 4 3 3.5 2 4.5 3 3 2 3.5 ...
 $ LOR              : num  4.5 4.5 3.5 2.5 3 3 4 4 1.5 3 ...
 $ CGPA             : num  9.65 8.87 8 8.67 8.21 9.34 8.2 7.9 8 8.6 ...
 $ Research         : int  1 1 1 1 0 1 1 0 0 0 ...
 $ Chance.of.Admit  : num  0.92 0.76 0.72 0.8 0.65 0.9 0.75 0.68 0.5 0.45 ...

The dataset consists of 500 observations and 9 variables.

It has 9 numerical variables.

summary(grad_set)
   Serial.No.      GRE.Score      TOEFL.Score    University.Rating      SOP       
 Min.   :  1.0   Min.   :290.0   Min.   : 92.0   Min.   :1.000     Min.   :1.000  
 1st Qu.:125.8   1st Qu.:308.0   1st Qu.:103.0   1st Qu.:2.000     1st Qu.:2.500  
 Median :250.5   Median :317.0   Median :107.0   Median :3.000     Median :3.500  
 Mean   :250.5   Mean   :316.5   Mean   :107.2   Mean   :3.114     Mean   :3.374  
 3rd Qu.:375.2   3rd Qu.:325.0   3rd Qu.:112.0   3rd Qu.:4.000     3rd Qu.:4.000  
 Max.   :500.0   Max.   :340.0   Max.   :120.0   Max.   :5.000     Max.   :5.000  
      LOR             CGPA          Research    Chance.of.Admit 
 Min.   :1.000   Min.   :6.800   Min.   :0.00   Min.   :0.3400  
 1st Qu.:3.000   1st Qu.:8.127   1st Qu.:0.00   1st Qu.:0.6300  
 Median :3.500   Median :8.560   Median :1.00   Median :0.7200  
 Mean   :3.484   Mean   :8.576   Mean   :0.56   Mean   :0.7217  
 3rd Qu.:4.000   3rd Qu.:9.040   3rd Qu.:1.00   3rd Qu.:0.8200  
 Max.   :5.000   Max.   :9.920   Max.   :1.00   Max.   :0.9700  

University rating ranges from 1-5, LOR (letter of recommendation) ranges from 1-5, and SOP (statement of purpose) ranges from 1-5 (see the summary above). Research is either 0 or 1; it indicates whether the candidate has undergraduate research experience.

colSums(is.na(grad_set))
       Serial.No.         GRE.Score       TOEFL.Score University.Rating               SOP 
                0                 0                 0                 0                 0 
              LOR              CGPA          Research   Chance.of.Admit 
                0                 0                 0                 0 

As the ‘Serial.No.’ column is not needed by the models, we will remove it.

# Drop the Serial.No. identifier column — it carries no predictive information.
grad_set$Serial.No.<-NULL 
str(grad_set) 
'data.frame':   500 obs. of  8 variables:
 $ GRE.Score        : int  337 324 316 322 314 330 321 308 302 323 ...
 $ TOEFL.Score      : int  118 107 104 110 103 115 109 101 102 108 ...
 $ University.Rating: int  4 4 3 3 2 5 3 2 1 3 ...
 $ SOP              : num  4.5 4 3 3.5 2 4.5 3 3 2 3.5 ...
 $ LOR              : num  4.5 4.5 3.5 2.5 3 3 4 4 1.5 3 ...
 $ CGPA             : num  9.65 8.87 8 8.67 8.21 9.34 8.2 7.9 8 8.6 ...
 $ Research         : int  1 1 1 1 0 1 1 0 0 0 ...
 $ Chance.of.Admit  : num  0.92 0.76 0.72 0.8 0.65 0.9 0.75 0.68 0.5 0.45 ...
colSums(is.na(grad_set)) 
        GRE.Score       TOEFL.Score University.Rating               SOP               LOR 
                0                 0                 0                 0                 0 
             CGPA          Research   Chance.of.Admit 
                0                 0                 0 
grad_set$Research<-as.factor(grad_set$Research) 

Research is converted into factor variable

table(grad_set$Research) 

  0   1 
220 280 
str(grad_set) 
'data.frame':   500 obs. of  8 variables:
 $ GRE.Score        : int  337 324 316 322 314 330 321 308 302 323 ...
 $ TOEFL.Score      : int  118 107 104 110 103 115 109 101 102 108 ...
 $ University.Rating: int  4 4 3 3 2 5 3 2 1 3 ...
 $ SOP              : num  4.5 4 3 3.5 2 4.5 3 3 2 3.5 ...
 $ LOR              : num  4.5 4.5 3.5 2.5 3 3 4 4 1.5 3 ...
 $ CGPA             : num  9.65 8.87 8 8.67 8.21 9.34 8.2 7.9 8 8.6 ...
 $ Research         : Factor w/ 2 levels "0","1": 2 2 2 2 1 2 2 1 1 1 ...
 $ Chance.of.Admit  : num  0.92 0.76 0.72 0.8 0.65 0.9 0.75 0.68 0.5 0.45 ...

Exploratory Data analysis.

library(ggplot2)
# Histogram of the target variable Chance.of.Admit.
# BUG FIX: theme_minimal() must come BEFORE the theme() tweak — a complete
# theme replaces all previously set theme elements, so the original order
# (theme(...) + theme_minimal()) silently discarded the centred title.
ggplot(grad_set, aes(x = Chance.of.Admit, fill = after_stat(count))) +
  geom_histogram() +
  ggtitle("Chance.of.Admit") +
  ylab("frequency") +
  xlab("Chance.of.Admit") +
  theme_minimal() +
  theme(plot.title = element_text(hjust = 0.5))

The tallest bars in the histogram reach a frequency of about 40. There are two bars of the same height: one lies in the range 0.6–0.7 and the other in the range 0.7–0.8.

# Pairwise Pearson correlations among the numeric predictors and the target
# (Research is excluded — it is now a factor).
corr <- cor(grad_set[,c("GRE.Score","TOEFL.Score","University.Rating","SOP","LOR","CGPA","Chance.of.Admit")])
corr 
                  GRE.Score TOEFL.Score University.Rating       SOP       LOR      CGPA
GRE.Score         1.0000000   0.8272004         0.6353762 0.6134977 0.5246794 0.8258780
TOEFL.Score       0.8272004   1.0000000         0.6497992 0.6444104 0.5415633 0.8105735
University.Rating 0.6353762   0.6497992         1.0000000 0.7280236 0.6086507 0.7052543
SOP               0.6134977   0.6444104         0.7280236 1.0000000 0.6637069 0.7121543
LOR               0.5246794   0.5415633         0.6086507 0.6637069 1.0000000 0.6374692
CGPA              0.8258780   0.8105735         0.7052543 0.7121543 0.6374692 1.0000000
Chance.of.Admit   0.8103506   0.7922276         0.6901324 0.6841365 0.6453645 0.8824126
                  Chance.of.Admit
GRE.Score               0.8103506
TOEFL.Score             0.7922276
University.Rating       0.6901324
SOP                     0.6841365
LOR                     0.6453645
CGPA                    0.8824126
Chance.of.Admit         1.0000000
# Visualise the correlation matrix: lower triangle only, variables ordered by
# hierarchical clustering, correlation values printed in each cell.
library(ggcorrplot)
ggcorrplot(corr, hc.order = TRUE, type = "lower", lab = TRUE, lab_size = 3, method="circle", colors = c("blue", "white", "red"), outline.color = "gray", show.legend = TRUE, show.diag = FALSE, title="College variables") 

cor.test(grad_set$Chance.of.Admit, grad_set$GRE.Score)

    Pearson's product-moment correlation

data:  grad_set$Chance.of.Admit and grad_set$GRE.Score
t = 30.862, df = 498, p-value < 2.2e-16
alternative hypothesis: true correlation is not equal to 0
95 percent confidence interval:
 0.7779406 0.8384601
sample estimates:
      cor 
0.8103506 
cor.test(grad_set$Chance.of.Admit, grad_set$TOEFL.Score)

    Pearson's product-moment correlation

data:  grad_set$Chance.of.Admit and grad_set$TOEFL.Score
t = 28.972, df = 498, p-value < 2.2e-16
alternative hypothesis: true correlation is not equal to 0
95 percent confidence interval:
 0.7571359 0.8227603
sample estimates:
      cor 
0.7922276 
cor.test(grad_set$Chance.of.Admit, grad_set$University.Rating)

    Pearson's product-moment correlation

data:  grad_set$Chance.of.Admit and grad_set$University.Rating
t = 21.281, df = 498, p-value < 2.2e-16
alternative hypothesis: true correlation is not equal to 0
95 percent confidence interval:
 0.6412490 0.7334367
sample estimates:
      cor 
0.6901324 
cor.test(grad_set$Chance.of.Admit, grad_set$SOP)

    Pearson's product-moment correlation

data:  grad_set$Chance.of.Admit and grad_set$SOP
t = 20.932, df = 498, p-value < 2.2e-16
alternative hypothesis: true correlation is not equal to 0
95 percent confidence interval:
 0.6345118 0.7281441
sample estimates:
      cor 
0.6841365 
cor.test(grad_set$Chance.of.Admit, grad_set$LOR)

    Pearson's product-moment correlation

data:  grad_set$Chance.of.Admit and grad_set$LOR
t = 18.854, df = 498, p-value < 2.2e-16
alternative hypothesis: true correlation is not equal to 0
95 percent confidence interval:
 0.5911272 0.6937918
sample estimates:
      cor 
0.6453645 
cor.test(grad_set$Chance.of.Admit, grad_set$CGPA) 

    Pearson's product-moment correlation

data:  grad_set$Chance.of.Admit and grad_set$CGPA
t = 41.855, df = 498, p-value < 2.2e-16
alternative hypothesis: true correlation is not equal to 0
95 percent confidence interval:
 0.8613745 0.9004286
sample estimates:
      cor 
0.8824126 

From the above plots and tests we can see that all the numeric variables are strongly associated with the target variable (all correlations are significant at p &lt; 2.2e-16).

# Boxplot of Chance.of.Admit split by Research status.
# Idiom fix: use <- for assignment rather than = (tidyverse style guide).
Plot1 <- ggplot(grad_set, aes(x = Chance.of.Admit, y = Research)) + geom_boxplot()
Plot1 

t.test(Chance.of.Admit~Research,data=grad_set) 

    Welch Two Sample t-test

data:  Chance.of.Admit by Research
t = -14.707, df = 487.6, p-value < 2.2e-16
alternative hypothesis: true difference in means between group 0 and group 1 is not equal to 0
95 percent confidence interval:
 -0.1757700 -0.1343404
sample estimates:
mean in group 0 mean in group 1 
      0.6349091       0.7899643 

From the above plot and test we can conclude that Research is closely associated with Chance.of.Admit.

# Density of Chance.of.Admit by Research status.  Research is already a
# factor at this point, so the factor() wrapper here is a no-op.
ggplot(grad_set, aes(Chance.of.Admit, color=factor(Research)))+
  geom_density(alpha=0.5)+ggtitle("Chance of admit vs Research Distribution") 

Splitting and Training the data.

Before we can develop the model, we must divide the data into train and test datasets. We will use the train dataset to develop a linear regression model, and the test dataset as a comparison to check if the model becomes overfit or cannot predict fresh data. We will utilize 80% of the data as training data and the remaining 20% as testing data.

set.seed(1)

library(lattice)
library(caret)
# 80/20 train/test split, stratified on the outcome via createDataPartition.
train.index <- createDataPartition(grad_set$Chance.of.Admit, p=0.8, list = FALSE)
grad_train<-grad_set[train.index, ]
grad_test <-grad_set[-train.index, ]
# BUG FIX: the originals re-indexed grad_train/grad_test by train.index again
# (grad_train[train.index, 8]), but grad_train has only ~402 rows while
# train.index holds row numbers up to 500, yielding NA rows.  The label
# vectors are simply column 8 (Chance.of.Admit) of each split.
grad_train_labels <- grad_train[, 8]
grad_test_labels  <- grad_test[, 8]
grad_train 
grad_test

Multiple Linear Regression.

# Multiple linear regression on all predictors, evaluated with 5-fold
# cross-validation via caret.  set.seed fixes the fold assignment.
set.seed(1)

train.control =trainControl(method = "cv", number = 5)
linear_model<-train(Chance.of.Admit~.,data = grad_train, method = 
"lm",trControl = train.control)
linear_model
Linear Regression 

402 samples
  7 predictor

No pre-processing
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 321, 322, 322, 322, 321 
Resampling results:

  RMSE        Rsquared   MAE       
  0.05946842  0.8162587  0.04279672

Tuning parameter 'intercept' was held constant at a value of TRUE
summary(linear_model) 

Call:
lm(formula = .outcome ~ ., data = dat)

Residuals:
      Min        1Q    Median        3Q       Max 
-0.266486 -0.021634  0.008613  0.033260  0.154116 

Coefficients:
                    Estimate Std. Error t value Pr(>|t|)    
(Intercept)       -1.2835454  0.1170847 -10.963  < 2e-16 ***
GRE.Score          0.0016785  0.0005665   2.963 0.003229 ** 
TOEFL.Score        0.0031621  0.0009420   3.357 0.000865 ***
University.Rating  0.0057657  0.0041144   1.401 0.161903    
SOP               -0.0018564  0.0049609  -0.374 0.708446    
LOR                0.0167548  0.0045200   3.707 0.000240 ***
CGPA               0.1227098  0.0108989  11.259  < 2e-16 ***
Research1          0.0236633  0.0072757   3.252 0.001243 ** 
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 0.0591 on 394 degrees of freedom
Multiple R-squared:  0.8244,    Adjusted R-squared:  0.8212 
F-statistic: 264.2 on 7 and 394 DF,  p-value: < 2.2e-16

Linear regression is a model with great interpretability, so we will interpret this multiple linear regression model directly, starting with the intercept and coefficients:

Based on the coefficient (slope): when CGPA increases by one point, Chance.of.Admit increases by around 0.12, holding the other predictors constant.

According to the P-value, CGPA is a significant predictor with a linear impact.

Based on the R-squared value of 0.8244, the chosen predictors are sufficient to explain the target variable.

# Predict on the held-out test set with the cross-validated linear model.
grad_pred<-predict(linear_model, grad_test) 
grad_pred
       11        14        15        21        26        30        34        36        42 
0.7293949 0.6419008 0.6415199 0.6167999 0.9641456 0.4878763 0.9377241 0.8573200 0.6575427 
       44        46        48        49        51        55        57        66        85 
0.8420700 0.8316512 0.9395353 0.8073033 0.6819192 0.6568112 0.5412968 0.7859014 0.9299288 
       88        95       106       108       112       119       125       136       137 
0.6582199 0.5335171 0.7810154 0.9302137 0.7713632 0.4842704 0.6649042 0.7715857 0.6777756 
      149       151       155       161       162       165       184       188       191 
0.9527685 0.9010266 0.7664129 0.5718688 0.4983568 0.8306815 0.7480719 0.9130405 0.8464611 
      198       200       203       213       216       219       225       226       227 
0.6410663 0.7317866 1.0021856 0.9698346 0.9043336 0.8086257 0.6097914 0.5604761 0.6868579 
      228       230       233       235       240       252       258       264       266 
0.6899909 0.8083187 0.6588340 0.8803346 0.5392077 0.7214524 0.7540211 0.7513528 0.6840242 
      275       280       285       289       297       302       304       310       313 
0.5808758 0.6934913 0.9395178 0.7767599 0.7084702 0.7254961 0.7392290 0.7033979 0.7949703 
      316       318       319       330       334       336       339       340       341 
0.6097144 0.5617550 0.7609573 0.5187343 0.7353781 0.8455441 0.7846865 0.7743144 0.7022722 
      342       345       347       350       359       361       374       375       384 
0.7795198 0.4581207 0.5132024 0.6107053 0.5534275 0.7822846 0.7335198 0.5656388 0.6201678 
      388       394       399       403       409       426       430       439       440 
0.6241849 0.7085626 0.7210545 0.7867110 0.5797927 0.9483625 0.8811438 0.7294205 0.6693038 
      443       452       454       458       472       474       485       495 
0.9109914 0.8664408 0.7558696 0.4807588 0.6374909 0.6322280 0.6366300 0.6414584 
RMSE(grad_pred,grad_test$Chance.of.Admit) 
[1] 0.06374082

Backward selection method

We can use step-wise regression to find a combination of predictors that produces the best model based on the lowest RMSE value. There are three types of step-wise regression: forward, backward, and both. We will use leapBackward and name the resulting model step_model.

# Backward stepwise selection (leapBackward) with 10-fold CV, evaluating
# subset sizes of 1 through all 7 predictors (nvmax).
train.control2 = trainControl(method = "cv", number = 10) 
step_model <- train(Chance.of.Admit~., data = grad_train,method =
"leapBackward",trControl = train.control2, tuneGrid=data.frame(nvmax=1:7))
step_model
Linear Regression with Backwards Selection 

402 samples
  7 predictor

No pre-processing
Resampling: Cross-Validated (10 fold) 
Summary of sample sizes: 362, 362, 362, 362, 361, 362, ... 
Resampling results across tuning parameters:

  nvmax  RMSE        Rsquared   MAE       
  1      0.06445038  0.7952703  0.04740910
  2      0.06188850  0.8116505  0.04497443
  3      0.06118626  0.8144784  0.04423225
  4      0.06021757  0.8192382  0.04365139
  5      0.05864031  0.8291525  0.04245683
  6      0.05859458  0.8291284  0.04251072
  7      0.05891145  0.8272844  0.04268908

RMSE was used to select the optimal model using the smallest value.
The final value used for the model was nvmax = 6.
summary(step_model$finalModel)
Subset selection object
7 Variables  (and intercept)
                  Forced in Forced out
GRE.Score             FALSE      FALSE
TOEFL.Score           FALSE      FALSE
University.Rating     FALSE      FALSE
SOP                   FALSE      FALSE
LOR                   FALSE      FALSE
CGPA                  FALSE      FALSE
Research1             FALSE      FALSE
1 subsets of each size up to 6
Selection Algorithm: backward
         GRE.Score TOEFL.Score University.Rating SOP LOR CGPA Research1
1  ( 1 ) " "       " "         " "               " " " " "*"  " "      
2  ( 1 ) " "       "*"         " "               " " " " "*"  " "      
3  ( 1 ) " "       "*"         " "               " " " " "*"  "*"      
4  ( 1 ) " "       "*"         " "               " " "*" "*"  "*"      
5  ( 1 ) "*"       "*"         " "               " " "*" "*"  "*"      
6  ( 1 ) "*"       "*"         "*"               " " "*" "*"  "*"      
 
stepwise_grad_pred<-predict(step_model,grad_test)
stepwise_grad_pred 
       11        14        15        21        26        30        34        36        42 
0.7295979 0.6436407 0.6427902 0.6173003 0.9636255 0.4868731 0.9366591 0.8579526 0.6572999 
       44        46        48        49        51        55        57        66        85 
0.8428339 0.8328362 0.9392487 0.8065775 0.6802504 0.6565655 0.5400560 0.7852702 0.9296481 
       88        95       106       108       112       119       125       136       137 
0.6595504 0.5323232 0.7814800 0.9285866 0.7716093 0.4852994 0.6630262 0.7708009 0.6807932 
      149       151       155       161       162       165       184       188       191 
0.9523068 0.9006393 0.7655586 0.5709406 0.4972843 0.8315932 0.7488011 0.9131345 0.8466568 
      198       200       203       213       216       219       225       226       227 
0.6425829 0.7324365 1.0014294 0.9705315 0.9049761 0.8070171 0.6106900 0.5604720 0.6874639 
      228       230       233       235       240       252       258       264       266 
0.6910316 0.8068879 0.6580582 0.8813234 0.5382539 0.7205732 0.7546481 0.7504385 0.6829550 
      275       280       285       289       297       302       304       310       313 
0.5806490 0.6931462 0.9406577 0.7782841 0.7086590 0.7245892 0.7395162 0.7032112 0.7956689 
      316       318       319       330       334       336       339       340       341 
0.6093798 0.5595782 0.7598021 0.5193832 0.7347002 0.8451986 0.7843442 0.7731347 0.7018795 
      342       345       347       350       359       361       374       375       384 
0.7795865 0.4570892 0.5119610 0.6099155 0.5538912 0.7826518 0.7330598 0.5649382 0.6198673 
      388       394       399       403       409       426       430       439       440 
0.6226361 0.7086743 0.7210360 0.7868891 0.5777561 0.9466254 0.8820884 0.7289142 0.6668101 
      443       452       454       458       472       474       485       495 
0.9112269 0.8668621 0.7539877 0.4812158 0.6352086 0.6344728 0.6376490 0.6407582 
RMSE(stepwise_grad_pred,grad_test$Chance.of.Admit) 
[1] 0.06357101

Regression Trees

# Fit a regression tree predicting Chance.of.Admit from all predictors.
library(rpart) 
reg_tree_data <- rpart(Chance.of.Admit ~ ., data = grad_train)
reg_tree_data
n= 402 

node), split, n, deviance, yval
      * denotes terminal node

 1) root 402 7.83451600 0.7217164  
   2) CGPA< 8.745 243 2.57536900 0.6374897  
     4) CGPA< 8.035 84 0.71878100 0.5495238  
       8) CGPA< 7.62 19 0.07925263 0.4715789 *
       9) CGPA>=7.62 65 0.49035380 0.5723077 *
     5) CGPA>=8.035 159 0.86320380 0.6839623  
      10) GRE.Score< 316.5 110 0.51888730 0.6665455 *
      11) GRE.Score>=316.5 49 0.23604080 0.7230612 *
   3) CGPA>=8.745 159 0.90066920 0.8504403  
     6) CGPA< 9.225 100 0.34433900 0.8069000  
      12) CGPA< 9.055 58 0.16556030 0.7815517 *
      13) CGPA>=9.055 42 0.09004762 0.8419048 *
     7) CGPA>=9.225 59 0.04544068 0.9242373 *
# Prune the tree at the complexity parameter (CP) with the lowest
# cross-validated error (xerror), then plot the pruned tree.
best<-reg_tree_data$cptable[which.min(reg_tree_data$cptable[,"xerror"]),"CP"] 
pruned_tree<-prune(reg_tree_data,cp=best) 
library(rpart.plot)
prp(pruned_tree) 

# NOTE(review): predictions use the UNPRUNED tree (reg_tree_data), not the
# pruned_tree computed above — confirm this is intentional; otherwise the
# pruning step has no effect on the reported test RMSE.
regtree_pred<-predict(reg_tree_data,grad_test)
regtree_pred 
       11        14        15        21        26        30        34        36        42 
0.7230612 0.5723077 0.6665455 0.5723077 0.9242373 0.4715789 0.9242373 0.8419048 0.6665455 
       44        46        48        49        51        55        57        66        85 
0.8419048 0.8419048 0.9242373 0.7815517 0.6665455 0.5723077 0.4715789 0.7815517 0.9242373 
       88        95       106       108       112       119       125       136       137 
0.7230612 0.5723077 0.7815517 0.9242373 0.7230612 0.4715789 0.6665455 0.7815517 0.6665455 
      149       151       155       161       162       165       184       188       191 
0.9242373 0.9242373 0.7815517 0.5723077 0.4715789 0.7815517 0.7815517 0.9242373 0.8419048 
      198       200       203       213       216       219       225       226       227 
0.6665455 0.6665455 0.9242373 0.9242373 0.9242373 0.7815517 0.6665455 0.5723077 0.6665455 
      228       230       233       235       240       252       258       264       266 
0.6665455 0.7815517 0.6665455 0.9242373 0.5723077 0.7815517 0.7230612 0.7815517 0.6665455 
      275       280       285       289       297       302       304       310       313 
0.5723077 0.6665455 0.9242373 0.7815517 0.6665455 0.7815517 0.7230612 0.6665455 0.7815517 
      316       318       319       330       334       336       339       340       341 
0.6665455 0.5723077 0.7815517 0.5723077 0.7230612 0.8419048 0.7230612 0.7230612 0.6665455 
      342       345       347       350       359       361       374       375       384 
0.7815517 0.4715789 0.5723077 0.6665455 0.5723077 0.7230612 0.7230612 0.5723077 0.6665455 
      388       394       399       403       409       426       430       439       440 
0.6665455 0.7815517 0.7815517 0.7815517 0.5723077 0.9242373 0.8419048 0.7230612 0.6665455 
      443       452       454       458       472       474       485       495 
0.9242373 0.9242373 0.7815517 0.4715789 0.6665455 0.6665455 0.5723077 0.6665455 
RMSE(regtree_pred,grad_test$Chance.of.Admit) 
[1] 0.0761262
# LASSO regression (glmnet, alpha = 1) over a log-spaced lambda grid with
# 10-fold CV.
# NOTE(review): knnImpute is a no-op here — colSums(is.na()) earlier showed
# no missing values; nzv drops near-zero-variance predictors, and glmnet
# preprocessing also centres and scales the predictors.
set.seed(1)
lasso <- train(
Chance.of.Admit ~., data = grad_train, method = "glmnet",
trControl = trainControl("cv", number = 10),
preProcess=c("knnImpute","nzv"), 
tuneGrid = expand.grid(alpha = 1, lambda = 10^seq(3, -3, length = 100)))
Warning: There were missing values in resampled performance measures.
lasso 
glmnet 

402 samples
  7 predictor

Pre-processing: nearest neighbor imputation (7), centered (7), scaled (7) 
Resampling: Cross-Validated (10 fold) 
Summary of sample sizes: 362, 362, 362, 362, 361, 362, ... 
Resampling results across tuning parameters:

  lambda        RMSE        Rsquared   MAE       
  1.000000e-03  0.05844035  0.8256967  0.04271772
  1.149757e-03  0.05844336  0.8256813  0.04271774
  1.321941e-03  0.05844804  0.8256603  0.04271831
  1.519911e-03  0.05845381  0.8256385  0.04272107
  1.747528e-03  0.05846115  0.8256151  0.04272596
  2.009233e-03  0.05847283  0.8255784  0.04273183
  2.310130e-03  0.05848836  0.8255347  0.04273898
  2.656088e-03  0.05850942  0.8254803  0.04275093
  3.053856e-03  0.05853737  0.8254150  0.04276818
  3.511192e-03  0.05857575  0.8253281  0.04278774
  4.037017e-03  0.05862748  0.8252163  0.04282380
  4.641589e-03  0.05869698  0.8250715  0.04287250
  5.336699e-03  0.05879064  0.8248801  0.04295343
  6.135907e-03  0.05891427  0.8246381  0.04309988
  7.054802e-03  0.05907873  0.8243212  0.04328687
  8.111308e-03  0.05930161  0.8238785  0.04353845
  9.326033e-03  0.05959099  0.8233112  0.04385473
  1.072267e-02  0.05996209  0.8226054  0.04424391
  1.232847e-02  0.06044313  0.8217045  0.04477603
  1.417474e-02  0.06107827  0.8204596  0.04547532
  1.629751e-02  0.06190116  0.8188094  0.04640682
  1.873817e-02  0.06291427  0.8169383  0.04750457
  2.154435e-02  0.06413072  0.8151564  0.04880947
  2.477076e-02  0.06566001  0.8130866  0.05044575
  2.848036e-02  0.06758415  0.8103686  0.05242082
  3.274549e-02  0.06972183  0.8091795  0.05442703
  3.764936e-02  0.07245805  0.8075154  0.05685135
  4.328761e-02  0.07595067  0.8045982  0.05992593
  4.977024e-02  0.08030787  0.8002723  0.06364598
  5.722368e-02  0.08557094  0.7953372  0.06808347
  6.579332e-02  0.09172541  0.7914374  0.07326244
  7.564633e-02  0.09904572  0.7912617  0.07954846
  8.697490e-02  0.10795976  0.7912617  0.08699538
  1.000000e-01  0.11871977  0.7912617  0.09580405
  1.149757e-01  0.13159676  0.7912617  0.10623622
  1.321941e-01  0.13916960        NaN  0.11245643
  1.519911e-01  0.13916960        NaN  0.11245643
  1.747528e-01  0.13916960        NaN  0.11245643
  2.009233e-01  0.13916960        NaN  0.11245643
  2.310130e-01  0.13916960        NaN  0.11245643
  2.656088e-01  0.13916960        NaN  0.11245643
  3.053856e-01  0.13916960        NaN  0.11245643
  3.511192e-01  0.13916960        NaN  0.11245643
  4.037017e-01  0.13916960        NaN  0.11245643
  4.641589e-01  0.13916960        NaN  0.11245643
  5.336699e-01  0.13916960        NaN  0.11245643
  6.135907e-01  0.13916960        NaN  0.11245643
  7.054802e-01  0.13916960        NaN  0.11245643
  8.111308e-01  0.13916960        NaN  0.11245643
  9.326033e-01  0.13916960        NaN  0.11245643
  1.072267e+00  0.13916960        NaN  0.11245643
  1.232847e+00  0.13916960        NaN  0.11245643
  1.417474e+00  0.13916960        NaN  0.11245643
  1.629751e+00  0.13916960        NaN  0.11245643
  1.873817e+00  0.13916960        NaN  0.11245643
  2.154435e+00  0.13916960        NaN  0.11245643
  2.477076e+00  0.13916960        NaN  0.11245643
  2.848036e+00  0.13916960        NaN  0.11245643
  3.274549e+00  0.13916960        NaN  0.11245643
  3.764936e+00  0.13916960        NaN  0.11245643
  4.328761e+00  0.13916960        NaN  0.11245643
  4.977024e+00  0.13916960        NaN  0.11245643
  5.722368e+00  0.13916960        NaN  0.11245643
  6.579332e+00  0.13916960        NaN  0.11245643
  7.564633e+00  0.13916960        NaN  0.11245643
  8.697490e+00  0.13916960        NaN  0.11245643
  1.000000e+01  0.13916960        NaN  0.11245643
  1.149757e+01  0.13916960        NaN  0.11245643
  1.321941e+01  0.13916960        NaN  0.11245643
  1.519911e+01  0.13916960        NaN  0.11245643
  1.747528e+01  0.13916960        NaN  0.11245643
  2.009233e+01  0.13916960        NaN  0.11245643
  2.310130e+01  0.13916960        NaN  0.11245643
  2.656088e+01  0.13916960        NaN  0.11245643
  3.053856e+01  0.13916960        NaN  0.11245643
  3.511192e+01  0.13916960        NaN  0.11245643
  4.037017e+01  0.13916960        NaN  0.11245643
  4.641589e+01  0.13916960        NaN  0.11245643
  5.336699e+01  0.13916960        NaN  0.11245643
  6.135907e+01  0.13916960        NaN  0.11245643
  7.054802e+01  0.13916960        NaN  0.11245643
  8.111308e+01  0.13916960        NaN  0.11245643
  9.326033e+01  0.13916960        NaN  0.11245643
  1.072267e+02  0.13916960        NaN  0.11245643
  1.232847e+02  0.13916960        NaN  0.11245643
  1.417474e+02  0.13916960        NaN  0.11245643
  1.629751e+02  0.13916960        NaN  0.11245643
  1.873817e+02  0.13916960        NaN  0.11245643
  2.154435e+02  0.13916960        NaN  0.11245643
  2.477076e+02  0.13916960        NaN  0.11245643
  2.848036e+02  0.13916960        NaN  0.11245643
  3.274549e+02  0.13916960        NaN  0.11245643
  3.764936e+02  0.13916960        NaN  0.11245643
  4.328761e+02  0.13916960        NaN  0.11245643
  4.977024e+02  0.13916960        NaN  0.11245643
  5.722368e+02  0.13916960        NaN  0.11245643
  6.579332e+02  0.13916960        NaN  0.11245643
  7.564633e+02  0.13916960        NaN  0.11245643
  8.697490e+02  0.13916960        NaN  0.11245643
  1.000000e+03  0.13916960        NaN  0.11245643

Tuning parameter 'alpha' was held constant at a value of 1
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were alpha = 1 and lambda = 0.001.
predic_lasso <- predict(lasso,grad_test) 
predic_lasso
       11        14        15        21        26        30        34        36        42 
0.7288252 0.6432182 0.6431649 0.6174217 0.9614574 0.4887597 0.9350614 0.8560910 0.6576563 
       44        46        48        49        51        55        57        66        85 
0.8424850 0.8314326 0.9385975 0.8052481 0.6797085 0.6569129 0.5409491 0.7854351 0.9277035 
       88        95       106       108       112       119       125       136       137 
0.6607434 0.5337918 0.7804155 0.9268720 0.7704649 0.4865263 0.6637982 0.7697311 0.6814626 
      149       151       155       161       162       165       184       188       191 
0.9512373 0.8993542 0.7662413 0.5730605 0.4991195 0.8303818 0.7491840 0.9115623 0.8452033 
      198       200       203       213       216       219       225       226       227 
0.6441425 0.7327043 0.9993506 0.9684642 0.9030216 0.8061326 0.6125591 0.5624156 0.6881214 
      228       230       233       235       240       252       258       264       266 
0.6921713 0.8062118 0.6590327 0.8797648 0.5406534 0.7224073 0.7537709 0.7507700 0.6844880 
      275       280       285       289       297       302       304       310       313 
0.5827045 0.6943250 0.9392536 0.7782298 0.7094425 0.7258533 0.7389975 0.7037587 0.7945425 
      316       318       319       330       334       336       339       340       341 
0.6107347 0.5618083 0.7598862 0.5219402 0.7341531 0.8437935 0.7829367 0.7717505 0.7016615 
      342       345       347       350       359       361       374       375       384 
0.7789475 0.4592514 0.5140939 0.6110671 0.5555279 0.7812550 0.7326958 0.5663206 0.6209754 
      388       394       399       403       409       426       430       439       440 
0.6237029 0.7101656 0.7218004 0.7866196 0.5772340 0.9448798 0.8800315 0.7289073 0.6681962 
      443       452       454       458       472       474       485       495 
0.9096085 0.8653975 0.7535204 0.4838545 0.6357812 0.6356516 0.6372856 0.6415410 
RMSE(predic_lasso, grad_test$Chance.of.Admit) 
[1] 0.06365759
coef(lasso$finalModel, lasso$bestTune$lambda) 
8 x 1 sparse Matrix of class "dgCMatrix"
                           s1
(Intercept)       0.721716418
GRE.Score         0.018691083
TOEFL.Score       0.018944858
University.Rating 0.005564949
SOP               .          
LOR               0.014606064
CGPA              0.073188806
Research1         0.011119258

Here we can see that only one variable has been shrunk to zero, i.e., SOP.

# Ridge regression (glmnet, alpha = 0) over the same log-spaced lambda grid,
# here with 5-fold CV.  na.action = na.pass defers NA handling to the
# preProcess step (knnImpute), though the data has no missing values.
set.seed(1)
ridge <- train(
Chance.of.Admit ~., data = grad_train, method = "glmnet",
trControl = trainControl("cv", number = 5),
na.action = na.pass, 
preProcess=c("knnImpute","nzv"),
tuneGrid = expand.grid(alpha = 0, lambda = 10^seq(-3, 3, length = 
100)))
Warning: There were missing values in resampled performance measures.
ridge 
glmnet 

402 samples
  7 predictor

Pre-processing: nearest neighbor imputation (7), centered (7), scaled (7) 
Resampling: Cross-Validated (5 fold) 
Summary of sample sizes: 321, 322, 322, 322, 321 
Resampling results across tuning parameters:

  lambda        RMSE        Rsquared   MAE       
  1.000000e-03  0.05958005  0.8152078  0.04298739
  1.149757e-03  0.05958005  0.8152078  0.04298739
  1.321941e-03  0.05958005  0.8152078  0.04298739
  1.519911e-03  0.05958005  0.8152078  0.04298739
  1.747528e-03  0.05958005  0.8152078  0.04298739
  2.009233e-03  0.05958005  0.8152078  0.04298739
  2.310130e-03  0.05958005  0.8152078  0.04298739
  2.656088e-03  0.05958005  0.8152078  0.04298739
  3.053856e-03  0.05958005  0.8152078  0.04298739
  3.511192e-03  0.05958005  0.8152078  0.04298739
  4.037017e-03  0.05958005  0.8152078  0.04298739
  4.641589e-03  0.05958005  0.8152078  0.04298739
  5.336699e-03  0.05958005  0.8152078  0.04298739
  6.135907e-03  0.05958005  0.8152078  0.04298739
  7.054802e-03  0.05958005  0.8152078  0.04298739
  8.111308e-03  0.05958005  0.8152078  0.04298739
  9.326033e-03  0.05958005  0.8152078  0.04298739
  1.072267e-02  0.05958005  0.8152078  0.04298739
  1.232847e-02  0.05959043  0.8151571  0.04299396
  1.417474e-02  0.05967166  0.8147684  0.04306573
  1.629751e-02  0.05976648  0.8143257  0.04316494
  1.873817e-02  0.05987980  0.8138168  0.04328614
  2.154435e-02  0.06001416  0.8132381  0.04342300
  2.477076e-02  0.06017117  0.8125929  0.04357596
  2.848036e-02  0.06035454  0.8118751  0.04375846
  3.274549e-02  0.06056542  0.8110970  0.04398263
  3.764936e-02  0.06080888  0.8102549  0.04423438
  4.328761e-02  0.06108809  0.8093619  0.04451967
  4.977024e-02  0.06141012  0.8084136  0.04484621
  5.722368e-02  0.06177904  0.8074313  0.04523795
  6.579332e-02  0.06220601  0.8064078  0.04570751
  7.564633e-02  0.06269757  0.8053687  0.04624362
  8.697490e-02  0.06326897  0.8043116  0.04684745
  1.000000e-01  0.06393139  0.8032552  0.04752161
  1.149757e-01  0.06470350  0.8021931  0.04830815
  1.321941e-01  0.06559749  0.8011549  0.04918573
  1.519911e-01  0.06663817  0.8001289  0.05016266
  1.747528e-01  0.06783717  0.7991444  0.05128029
  2.009233e-01  0.06922119  0.7981875  0.05261183
  2.310130e-01  0.07079620  0.7972858  0.05410944
  2.656088e-01  0.07258687  0.7964230  0.05573126
  3.053856e-01  0.07458621  0.7956236  0.05750769
  3.511192e-01  0.07681407  0.7948725  0.05946963
  4.037017e-01  0.07924082  0.7941857  0.06159016
  4.641589e-01  0.08187580  0.7935420  0.06387347
  5.336699e-01  0.08467122  0.7929621  0.06625494
  6.135907e-01  0.08762923  0.7924268  0.06879183
  7.054802e-01  0.09068061  0.7919499  0.07141497
  8.111308e-01  0.09382530  0.7915133  0.07407270
  9.326033e-01  0.09698162  0.7911280  0.07673188
  1.072267e+00  0.10015375  0.7907776  0.07941274
  1.232847e+00  0.10325722  0.7904707  0.08202276
  1.417474e+00  0.10630543  0.7901930  0.08456141
  1.629751e+00  0.10921968  0.7899513  0.08698312
  1.873817e+00  0.11202441  0.7897335  0.08931320
  2.154435e+00  0.11465232  0.7895449  0.09150907
  2.477076e+00  0.11713750  0.7893755  0.09359367
  2.848036e+00  0.11942626  0.7892294  0.09550703
  3.274549e+00  0.12155897  0.7890985  0.09728217
  3.764936e+00  0.12349509  0.7889860  0.09889616
  4.328761e+00  0.12527736  0.7888853  0.10038260
  4.977024e+00  0.12687643  0.7887989  0.10172392
  5.722368e+00  0.12833395  0.7887218  0.10297259
  6.579332e+00  0.12962930  0.7886558  0.10408237
  7.564633e+00  0.13080069  0.7885968  0.10508369
  8.697490e+00  0.13183393  0.7885465  0.10596524
  1.000000e+01  0.13276247  0.7885016  0.10675665
  1.149757e+01  0.13357667  0.7884633  0.10744936
  1.321941e+01  0.13430480  0.7884291  0.10806769
  1.519911e+01  0.13494031  0.7884000  0.10860667
  1.747528e+01  0.13550650  0.7883741  0.10908615
  2.009233e+01  0.13599890  0.7883520  0.10950276
  2.310130e+01  0.13643630  0.7883323  0.10987408
  2.656088e+01  0.13681566  0.7883155  0.11019873
  3.053856e+01  0.13715189  0.7883006  0.11048629
  3.511192e+01  0.13744288  0.7882879  0.11073572
  4.037017e+01  0.13770035  0.7882766  0.11095665
  4.641589e+01  0.13792281  0.7882669  0.11114749
  5.336699e+01  0.13811938  0.7882584  0.11131679
  6.135907e+01  0.13828903  0.7882511  0.11146320
  7.054802e+01  0.13843878  0.7882446  0.11159234
  8.111308e+01  0.13856789  0.7882391  0.11170389
  9.326033e+01  0.13868178  0.7882342  0.11180225
  1.072267e+02  0.13877990  0.7882300  0.11188702
  1.232847e+02  0.13938685  0.7943567  0.11240929
  1.417474e+02  0.13944430        NaN  0.11246042
  1.629751e+02  0.13944430        NaN  0.11246042
  1.873817e+02  0.13944430        NaN  0.11246042
  2.154435e+02  0.13944430        NaN  0.11246042
  2.477076e+02  0.13944430        NaN  0.11246042
  2.848036e+02  0.13944430        NaN  0.11246042
  3.274549e+02  0.13944430        NaN  0.11246042
  3.764936e+02  0.13944430        NaN  0.11246042
  4.328761e+02  0.13944430        NaN  0.11246042
  4.977024e+02  0.13944430        NaN  0.11246042
  5.722368e+02  0.13944430        NaN  0.11246042
  6.579332e+02  0.13944430        NaN  0.11246042
  7.564633e+02  0.13944430        NaN  0.11246042
  8.697490e+02  0.13944430        NaN  0.11246042
  1.000000e+03  0.13944430        NaN  0.11246042

Tuning parameter 'alpha' was held constant at a value of 0
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were alpha = 0 and lambda = 0.01072267.
# Score the held-out test set with the tuned ridge model (alpha = 0).
predict_ridge <- predict(ridge, newdata = grad_test)
predict_ridge
       11        14        15        21        26        30        34        36        42 
0.7383161 0.6573223 0.6477232 0.6306653 0.9613125 0.5041399 0.9294257 0.8553772 0.6600205 
       44        46        48        49        51        55        57        66        85 
0.8454537 0.8326065 0.9319931 0.8045881 0.6804714 0.6738506 0.5636302 0.7838771 0.9290025 
       88        95       106       108       112       119       125       136       137 
0.6642772 0.5414475 0.7801769 0.9224142 0.7764242 0.5034766 0.6573040 0.7687810 0.6849925 
      149       151       155       161       162       165       184       188       191 
0.9372277 0.8930724 0.7596101 0.5747198 0.5040074 0.8333033 0.7447648 0.9109064 0.8446382 
      198       200       203       213       216       219       225       226       227 
0.6419705 0.7301122 0.9907713 0.9654422 0.9045293 0.8021941 0.6081284 0.5557924 0.6856187 
      228       230       233       235       240       252       258       264       266 
0.6883596 0.8008708 0.6585854 0.8802089 0.5331113 0.6987381 0.7563101 0.7446847 0.6717107 
      275       280       285       289       297       302       304       310       313 
0.5826590 0.6776499 0.9320208 0.7720329 0.7029512 0.7147364 0.7432428 0.7017825 0.7888148 
      316       318       319       330       334       336       339       340       341 
0.6116043 0.5502563 0.7543705 0.5162856 0.7360127 0.8416508 0.7903067 0.7797105 0.7013524 
      342       345       347       350       359       361       374       375       384 
0.7803370 0.4655833 0.5166176 0.6160169 0.5694918 0.7882182 0.7350915 0.5797948 0.6162109 
      388       394       399       403       409       426       430       439       440 
0.6232158 0.6975730 0.7118986 0.7808922 0.5875909 0.9250432 0.8934258 0.7244715 0.6577142 
      443       452       454       458       472       474       485       495 
0.9051600 0.8615598 0.7461144 0.4834334 0.6397740 0.6419861 0.6555675 0.6294396 
RMSE(predict_ridge,grad_test$Chance.of.Admit) 
[1] 0.06335264
# Elastic net: tune the mixing parameter alpha (0 = ridge, 1 = lasso)
# jointly with the penalty lambda over a 10 x 100 grid, using 10-fold CV.
# preProcess imputes missing values via k-nearest neighbours and drops
# near-zero-variance predictors before fitting.
set.seed(1)
enet <- train(
  Chance.of.Admit ~ ., data = grad_train, method = "glmnet",
  trControl = trainControl("cv", number = 10),
  preProcess = c("knnImpute", "nzv"),
  tuneGrid = expand.grid(
    alpha  = seq(0, 1, length = 10),
    lambda = 10^seq(-3, 3, length = 100)
  )
)
Warning: There were missing values in resampled performance measures.
enet 
glmnet 

402 samples
  7 predictor

Pre-processing: nearest neighbor imputation (7), centered (7), scaled (7) 
Resampling: Cross-Validated (10 fold) 
Summary of sample sizes: 362, 362, 362, 362, 361, 362, ... 
Resampling results across tuning parameters:

  alpha      lambda        RMSE        Rsquared   MAE       
  0.0000000  1.000000e-03  0.05881759  0.8237503  0.04308612
  0.0000000  1.149757e-03  0.05881759  0.8237503  0.04308612
  0.0000000  1.321941e-03  0.05881759  0.8237503  0.04308612
  0.0000000  1.519911e-03  0.05881759  0.8237503  0.04308612
  0.0000000  1.747528e-03  0.05881759  0.8237503  0.04308612
  0.0000000  2.009233e-03  0.05881759  0.8237503  0.04308612
  0.0000000  2.310130e-03  0.05881759  0.8237503  0.04308612
  0.0000000  2.656088e-03  0.05881759  0.8237503  0.04308612
  0.0000000  3.053856e-03  0.05881759  0.8237503  0.04308612
  0.0000000  3.511192e-03  0.05881759  0.8237503  0.04308612
  0.0000000  4.037017e-03  0.05881759  0.8237503  0.04308612
  0.0000000  4.641589e-03  0.05881759  0.8237503  0.04308612
  0.0000000  5.336699e-03  0.05881759  0.8237503  0.04308612
  0.0000000  6.135907e-03  0.05881759  0.8237503  0.04308612
  0.0000000  7.054802e-03  0.05881759  0.8237503  0.04308612
  0.0000000  8.111308e-03  0.05881759  0.8237503  0.04308612
  0.0000000  9.326033e-03  0.05881759  0.8237503  0.04308612
  0.0000000  1.072267e-02  0.05881759  0.8237503  0.04308612
  0.0000000  1.232847e-02  0.05882148  0.8237389  0.04308917
  0.0000000  1.417474e-02  0.05891080  0.8233389  0.04318362
  0.0000000  1.629751e-02  0.05901699  0.8228658  0.04329123
  0.0000000  1.873817e-02  0.05914112  0.8223285  0.04340251
  0.0000000  2.154435e-02  0.05928722  0.8217127  0.04352650
  0.0000000  2.477076e-02  0.05945497  0.8210316  0.04366934
  0.0000000  2.848036e-02  0.05964932  0.8202717  0.04384516
  0.0000000  3.274549e-02  0.05986959  0.8194540  0.04406751
  0.0000000  3.764936e-02  0.06012228  0.8185664  0.04431817
  0.0000000  4.328761e-02  0.06040867  0.8176303  0.04461307
  0.0000000  4.977024e-02  0.06073679  0.8166366  0.04494250
  0.0000000  5.722368e-02  0.06110840  0.8156179  0.04531927
  0.0000000  6.579332e-02  0.06153689  0.8145541  0.04578883
  0.0000000  7.564633e-02  0.06202592  0.8134869  0.04633370
  0.0000000  8.697490e-02  0.06259395  0.8123985  0.04693637
  0.0000000  1.000000e-01  0.06324850  0.8113245  0.04760866
  0.0000000  1.149757e-01  0.06401291  0.8102425  0.04840084
  0.0000000  1.321941e-01  0.06489615  0.8092002  0.04929815
  0.0000000  1.519911e-01  0.06592812  0.8081669  0.05030865
  0.0000000  1.747528e-01  0.06711720  0.8071894  0.05141071
  0.0000000  2.009233e-01  0.06849633  0.8062352  0.05269803
  0.0000000  2.310130e-01  0.07006700  0.8053488  0.05415389
  0.0000000  2.656088e-01  0.07186141  0.8044958  0.05573899
  0.0000000  3.053856e-01  0.07386545  0.8037166  0.05751213
  0.0000000  3.511192e-01  0.07610895  0.8029795  0.05946239
  0.0000000  4.037017e-01  0.07854911  0.8023146  0.06158415
  0.0000000  4.641589e-01  0.08120920  0.8016869  0.06388054
  0.0000000  5.336699e-01  0.08402533  0.8011293  0.06625422
  0.0000000  6.135907e-01  0.08701519  0.8006097  0.06877968
  0.0000000  7.054802e-01  0.09008968  0.8001534  0.07138350
  0.0000000  8.111308e-01  0.09326871  0.7997311  0.07403692
  0.0000000  9.326033e-01  0.09644685  0.7993639  0.07669628
  0.0000000  1.072267e+00  0.09965238  0.7990260  0.07936420
  0.0000000  1.232847e+00  0.10277422  0.7987344  0.08197036
  0.0000000  1.417474e+00  0.10585275  0.7984672  0.08451119
  0.0000000  1.629751e+00  0.10878123  0.7982382  0.08693528
  0.0000000  1.873817e+00  0.11161227  0.7980289  0.08927534
  0.0000000  2.154435e+00  0.11425053  0.7978506  0.09147850
  0.0000000  2.477076e+00  0.11675781  0.7976880  0.09357840
  0.0000000  2.848036e+00  0.11905376  0.7975500  0.09548820
  0.0000000  3.274549e+00  0.12120461  0.7974245  0.09727544
  0.0000000  3.764936e+00  0.12314556  0.7973185  0.09888599
  0.0000000  4.328761e+00  0.12494245  0.7972220  0.10037330
  0.0000000  4.977024e+00  0.12654469  0.7971406  0.10171747
  0.0000000  5.722368e+00  0.12801382  0.7970667  0.10296698
  0.0000000  6.579332e+00  0.12931122  0.7970045  0.10407376
  0.0000000  7.564633e+00  0.13049174  0.7969481  0.10507791
  0.0000000  8.697490e+00  0.13152629  0.7969007  0.10595569
  0.0000000  1.000000e+01  0.13246196  0.7968578  0.10674784
  0.0000000  1.149757e+01  0.13327699  0.7968217  0.10743754
  0.0000000  1.321941e+01  0.13401063  0.7967891  0.10805807
  0.0000000  1.519911e+01  0.13464668  0.7967617  0.10859582
  0.0000000  1.747528e+01  0.13521711  0.7967369  0.10907749
  0.0000000  2.009233e+01  0.13570986  0.7967161  0.10949377
  0.0000000  2.310130e+01  0.13615052  0.7966972  0.10986906
  0.0000000  2.656088e+01  0.13653010  0.7966815  0.11019212
  0.0000000  3.053856e+01  0.13686882  0.7966672  0.11048117
  0.0000000  3.511192e+01  0.13715996  0.7966552  0.11073137
  0.0000000  4.037017e+01  0.13741933  0.7966444  0.11095413
  0.0000000  4.641589e+01  0.13764189  0.7966354  0.11114521
  0.0000000  5.336699e+01  0.13783991  0.7966272  0.11131514
  0.0000000  6.135907e+01  0.13800962  0.7966203  0.11146073
  0.0000000  7.054802e+01  0.13816047  0.7966141  0.11159010
  0.0000000  8.111308e+01  0.13828963  0.7966089  0.11170084
  0.0000000  9.326033e+01  0.13840435  0.7966042  0.11179918
  0.0000000  1.072267e+02  0.13850250  0.7966003  0.11188330
  0.0000000  1.232847e+02  0.13912854  0.8126850  0.11242051
  0.0000000  1.417474e+02  0.13916960        NaN  0.11245643
  0.0000000  1.629751e+02  0.13916960        NaN  0.11245643
  0.0000000  1.873817e+02  0.13916960        NaN  0.11245643
  0.0000000  2.154435e+02  0.13916960        NaN  0.11245643
  0.0000000  2.477076e+02  0.13916960        NaN  0.11245643
  0.0000000  2.848036e+02  0.13916960        NaN  0.11245643
  0.0000000  3.274549e+02  0.13916960        NaN  0.11245643
  0.0000000  3.764936e+02  0.13916960        NaN  0.11245643
  0.0000000  4.328761e+02  0.13916960        NaN  0.11245643
  0.0000000  4.977024e+02  0.13916960        NaN  0.11245643
  0.0000000  5.722368e+02  0.13916960        NaN  0.11245643
  0.0000000  6.579332e+02  0.13916960        NaN  0.11245643
  0.0000000  7.564633e+02  0.13916960        NaN  0.11245643
  0.0000000  8.697490e+02  0.13916960        NaN  0.11245643
  0.0000000  1.000000e+03  0.13916960        NaN  0.11245643
  0.1111111  1.000000e-03  0.05851117  0.8252870  0.04279782
  0.1111111  1.149757e-03  0.05850370  0.8253322  0.04279719
  0.1111111  1.321941e-03  0.05849770  0.8253624  0.04279524
  0.1111111  1.519911e-03  0.05849016  0.8254035  0.04279259
  0.1111111  1.747528e-03  0.05848415  0.8254372  0.04279153
  0.1111111  2.009233e-03  0.05847789  0.8254735  0.04279097
  0.1111111  2.310130e-03  0.05847106  0.8255130  0.04279061
  0.1111111  2.656088e-03  0.05846742  0.8255362  0.04279206
  0.1111111  3.053856e-03  0.05846489  0.8255549  0.04279365
  0.1111111  3.511192e-03  0.05846387  0.8255673  0.04279607
  0.1111111  4.037017e-03  0.05846890  0.8255449  0.04280349
  0.1111111  4.641589e-03  0.05847851  0.8255018  0.04281278
  0.1111111  5.336699e-03  0.05849972  0.8254013  0.04282408
  0.1111111  6.135907e-03  0.05852760  0.8252764  0.04284148
  0.1111111  7.054802e-03  0.05856158  0.8251327  0.04286124
  0.1111111  8.111308e-03  0.05860452  0.8249594  0.04288538
  0.1111111  9.326033e-03  0.05865927  0.8247451  0.04292100
  0.1111111  1.072267e-02  0.05872537  0.8245037  0.04297700
  0.1111111  1.232847e-02  0.05880563  0.8242278  0.04304244
  0.1111111  1.417474e-02  0.05890040  0.8239281  0.04313435
  0.1111111  1.629751e-02  0.05901633  0.8235824  0.04324917
  0.1111111  1.873817e-02  0.05915889  0.8231791  0.04337560
  0.1111111  2.154435e-02  0.05933242  0.8227198  0.04353123
  0.1111111  2.477076e-02  0.05954287  0.8222020  0.04372895
  0.1111111  2.848036e-02  0.05979610  0.8216344  0.04401415
  0.1111111  3.274549e-02  0.06010241  0.8210089  0.04436624
  0.1111111  3.764936e-02  0.06046969  0.8203455  0.04478439
  0.1111111  4.328761e-02  0.06091335  0.8196369  0.04530743
  0.1111111  4.977024e-02  0.06144675  0.8189046  0.04592588
  0.1111111  5.722368e-02  0.06209256  0.8181423  0.04665999
  0.1111111  6.579332e-02  0.06287080  0.8173763  0.04750478
  0.1111111  7.564633e-02  0.06381331  0.8166033  0.04845152
  0.1111111  8.697490e-02  0.06494830  0.8158510  0.04955120
  0.1111111  1.000000e-01  0.06631797  0.8151103  0.05083300
  0.1111111  1.149757e-01  0.06795543  0.8144208  0.05232508
  0.1111111  1.321941e-01  0.06991219  0.8137667  0.05411890
  0.1111111  1.519911e-01  0.07222000  0.8131831  0.05623787
  0.1111111  1.747528e-01  0.07493334  0.8126646  0.05864948
  0.1111111  2.009233e-01  0.07806981  0.8122446  0.06135486
  0.1111111  2.310130e-01  0.08167658  0.8119113  0.06449966
  0.1111111  2.656088e-01  0.08574408  0.8116905  0.06792516
  0.1111111  3.053856e-01  0.09030938  0.8115606  0.07173951
  0.1111111  3.511192e-01  0.09531809  0.8115219  0.07590577
  0.1111111  4.037017e-01  0.10079529  0.8114714  0.08042341
  0.1111111  4.641589e-01  0.10664018  0.8111915  0.08524241
  0.1111111  5.336699e-01  0.11281531  0.8106218  0.09041121
  0.1111111  6.135907e-01  0.11898582  0.8113314  0.09550829
  0.1111111  7.054802e-01  0.12531452  0.8079014  0.10076807
  0.1111111  8.111308e-01  0.13077129  0.8066617  0.10536046
  0.1111111  9.326033e-01  0.13542532  0.8133946  0.10929563
  0.1111111  1.072267e+00  0.13863996  0.7917875  0.11201427
  0.1111111  1.232847e+00  0.13916960        NaN  0.11245643
  0.1111111  1.417474e+00  0.13916960        NaN  0.11245643
  0.1111111  1.629751e+00  0.13916960        NaN  0.11245643
  0.1111111  1.873817e+00  0.13916960        NaN  0.11245643
  0.1111111  2.154435e+00  0.13916960        NaN  0.11245643
  0.1111111  2.477076e+00  0.13916960        NaN  0.11245643
  0.1111111  2.848036e+00  0.13916960        NaN  0.11245643
  0.1111111  3.274549e+00  0.13916960        NaN  0.11245643
  0.1111111  3.764936e+00  0.13916960        NaN  0.11245643
  0.1111111  4.328761e+00  0.13916960        NaN  0.11245643
  0.1111111  4.977024e+00  0.13916960        NaN  0.11245643
  0.1111111  5.722368e+00  0.13916960        NaN  0.11245643
  0.1111111  6.579332e+00  0.13916960        NaN  0.11245643
  0.1111111  7.564633e+00  0.13916960        NaN  0.11245643
  0.1111111  8.697490e+00  0.13916960        NaN  0.11245643
  0.1111111  1.000000e+01  0.13916960        NaN  0.11245643
  0.1111111  1.149757e+01  0.13916960        NaN  0.11245643
  0.1111111  1.321941e+01  0.13916960        NaN  0.11245643
  0.1111111  1.519911e+01  0.13916960        NaN  0.11245643
  0.1111111  1.747528e+01  0.13916960        NaN  0.11245643
  0.1111111  2.009233e+01  0.13916960        NaN  0.11245643
  0.1111111  2.310130e+01  0.13916960        NaN  0.11245643
  0.1111111  2.656088e+01  0.13916960        NaN  0.11245643
  0.1111111  3.053856e+01  0.13916960        NaN  0.11245643
  0.1111111  3.511192e+01  0.13916960        NaN  0.11245643
  0.1111111  4.037017e+01  0.13916960        NaN  0.11245643
  0.1111111  4.641589e+01  0.13916960        NaN  0.11245643
  0.1111111  5.336699e+01  0.13916960        NaN  0.11245643
  0.1111111  6.135907e+01  0.13916960        NaN  0.11245643
  0.1111111  7.054802e+01  0.13916960        NaN  0.11245643
  0.1111111  8.111308e+01  0.13916960        NaN  0.11245643
  0.1111111  9.326033e+01  0.13916960        NaN  0.11245643
  0.1111111  1.072267e+02  0.13916960        NaN  0.11245643
  0.1111111  1.232847e+02  0.13916960        NaN  0.11245643
  0.1111111  1.417474e+02  0.13916960        NaN  0.11245643
  0.1111111  1.629751e+02  0.13916960        NaN  0.11245643
  0.1111111  1.873817e+02  0.13916960        NaN  0.11245643
  0.1111111  2.154435e+02  0.13916960        NaN  0.11245643
  0.1111111  2.477076e+02  0.13916960        NaN  0.11245643
  0.1111111  2.848036e+02  0.13916960        NaN  0.11245643
  0.1111111  3.274549e+02  0.13916960        NaN  0.11245643
  0.1111111  3.764936e+02  0.13916960        NaN  0.11245643
  0.1111111  4.328761e+02  0.13916960        NaN  0.11245643
  0.1111111  4.977024e+02  0.13916960        NaN  0.11245643
  0.1111111  5.722368e+02  0.13916960        NaN  0.11245643
  0.1111111  6.579332e+02  0.13916960        NaN  0.11245643
  0.1111111  7.564633e+02  0.13916960        NaN  0.11245643
  0.1111111  8.697490e+02  0.13916960        NaN  0.11245643
  0.1111111  1.000000e+03  0.13916960        NaN  0.11245643
 [ reached getOption("max.print") -- omitted 800 rows ]

RMSE was used to select the optimal model using the smallest value.
The final values used for the model were alpha = 0.3333333 and lambda = 0.00231013.
# Predict admission chance on the test set with the tuned elastic net.
pred_elast <- predict(enet, newdata = grad_test)
pred_elast
       11        14        15        21        26        30        34        36        42 
0.7307465 0.6450570 0.6433971 0.6195222 0.9620887 0.4917636 0.9347993 0.8558311 0.6581319 
       44        46        48        49        51        55        57        66        85 
0.8432034 0.8313434 0.9379616 0.8055300 0.6803097 0.6603104 0.5455337 0.7855285 0.9285293 
       88        95       106       108       112       119       125       136       137 
0.6610830 0.5352317 0.7802732 0.9270345 0.7715726 0.4889842 0.6627475 0.7696968 0.6811912 
      149       151       155       161       162       165       184       188       191 
0.9492379 0.8986822 0.7655315 0.5736634 0.5000277 0.8309155 0.7481660 0.9117349 0.8451323 
      198       200       203       213       216       219       225       226       227 
0.6431701 0.7320558 0.9984843 0.9682358 0.9033779 0.8059912 0.6112015 0.5607360 0.6873610 
      228       230       233       235       240       252       258       264       266 
0.6911161 0.8057260 0.6591726 0.8797874 0.5391769 0.7183195 0.7543101 0.7499503 0.6823289 
      275       280       285       289       297       302       304       310       313 
0.5828118 0.6911618 0.9381219 0.7766523 0.7080732 0.7241821 0.7398410 0.7032113 0.7931392 
      316       318       319       330       334       336       339       340       341 
0.6108615 0.5600044 0.7592754 0.5202054 0.7347468 0.8436973 0.7844755 0.7736708 0.7015713 
      342       345       347       350       359       361       374       375       384 
0.7793977 0.4602287 0.5146372 0.6121509 0.5579296 0.7826524 0.7333404 0.5690322 0.6198343 
      388       394       399       403       409       426       430       439       440 
0.6239470 0.7078683 0.7199349 0.7856349 0.5793230 0.9418407 0.8827413 0.7283620 0.6669491 
      443       452       454       458       472       474       485       495 
0.9090544 0.8647360 0.7527784 0.4831566 0.6371478 0.6362453 0.6403479 0.6390252 
RMSE(pred_elast,grad_test$Chance.of.Admit) 
[1] 0.06355819
# Random forest with 10-fold CV over the number of variables tried at each
# split (mtry). The data have only 7 predictors, so mtry must not exceed 7:
# the original grid c(2, 4, 8) made randomForest emit
# "invalid mtry: reset to within valid range" in every resample and silently
# cap 8 to 7. Tuning over c(2, 4, 7) is equivalent but warning-free.
set.seed(1)
grad_rf <- train(
  Chance.of.Admit ~ ., data = grad_train, method = "rf",
  trControl = trainControl(method = "cv", number = 10),
  preProcess = c("knnImpute", "nzv"),
  tuneGrid = expand.grid(mtry = c(2, 4, 7))
)
Warning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid rangeWarning: invalid mtry: reset to within valid range
grad_rf 
Random Forest 

402 samples
  7 predictor

Pre-processing: nearest neighbor imputation (7), centered (7), scaled (7) 
Resampling: Cross-Validated (10 fold) 
Summary of sample sizes: 362, 362, 362, 362, 361, 362, ... 
Resampling results across tuning parameters:

  mtry  RMSE        Rsquared   MAE       
  2     0.06113433  0.8106544  0.04372467
  4     0.06181779  0.8070121  0.04397187
  8     0.06394504  0.7955204  0.04526479

RMSE was used to select the optimal model using the smallest value.
The final value used for the model was mtry = 2.
# Test-set predictions from the tuned random forest (mtry chosen by CV).
pred_forest <- predict(grad_rf, newdata = grad_test)
pred_forest
       11        14        15        21        26        30        34        36        42 
0.7311643 0.5931518 0.6618373 0.6020852 0.9473801 0.5183097 0.9352422 0.8869684 0.6678575 
       44        46        48        49        51        55        57        66        85 
0.8666588 0.8579473 0.9304571 0.7978142 0.6734551 0.6589168 0.5695785 0.7551076 0.9298052 
       88        95       106       108       112       119       125       136       137 
0.6670416 0.5090627 0.7607666 0.9041707 0.7775654 0.5151269 0.6367974 0.7509968 0.7008946 
      149       151       155       161       162       165       184       188       191 
0.9433718 0.9116872 0.7359377 0.5544387 0.5014605 0.8387913 0.7401450 0.9286825 0.8627904 
      198       200       203       213       216       219       225       226       227 
0.6757486 0.7312598 0.9574533 0.9477589 0.9174141 0.7853236 0.6368809 0.5677799 0.6836185 
      228       230       233       235       240       252       258       264       266 
0.6965371 0.7772149 0.6937926 0.9014371 0.5310771 0.7164965 0.7293296 0.7578970 0.7015132 
      275       280       285       289       297       302       304       310       313 
0.5828838 0.6765667 0.9342921 0.7653740 0.7170788 0.7143537 0.7210006 0.6861478 0.7424628 
      316       318       319       330       334       336       339       340       341 
0.6347526 0.5748202 0.7583874 0.5116873 0.7309068 0.8405791 0.7863762 0.7556879 0.6900221 
      342       345       347       350       359       361       374       375       384 
0.7631154 0.4560993 0.4965240 0.6225856 0.5668772 0.7634932 0.7334619 0.5905744 0.6333655 
      388       394       399       403       409       426       430       439       440 
0.6421206 0.7064300 0.7575430 0.7820022 0.5513428 0.9114435 0.8951666 0.7085635 0.6624390 
      443       452       454       458       472       474       485       495 
0.9191935 0.8932111 0.7498095 0.4992971 0.6660607 0.6415158 0.6328317 0.6330138 
RMSE(pred_forest, grad_test$Chance.of.Admit) 
[1] 0.06240536
varImp(grad_rf)
rf variable importance

We can see that there are 6 variables which are most important; out of these, CGPA is the most important.

# Stochastic gradient boosting via caret's default gbm tuning grid,
# evaluated with 10-fold CV. na.action = na.pass defers missing-value
# handling to caret rather than dropping rows up front.
# verbose = FALSE (forwarded to gbm) suppresses the per-iteration
# TrainDeviance log that otherwise floods the console for every resample.
set.seed(1)

gbm <- train(
  Chance.of.Admit ~ ., data = grad_train, method = "gbm",
  na.action = na.pass,
  trControl = trainControl("cv", number = 10),
  verbose = FALSE
)
Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0175             nan     0.1000    0.0021
     2        0.0157             nan     0.1000    0.0017
     3        0.0142             nan     0.1000    0.0014
     4        0.0130             nan     0.1000    0.0012
     5        0.0120             nan     0.1000    0.0010
     6        0.0111             nan     0.1000    0.0007
     7        0.0103             nan     0.1000    0.0008
     8        0.0096             nan     0.1000    0.0008
     9        0.0090             nan     0.1000    0.0006
    10        0.0084             nan     0.1000    0.0004
    20        0.0052             nan     0.1000    0.0002
    40        0.0035             nan     0.1000    0.0000
    60        0.0032             nan     0.1000   -0.0000
    80        0.0031             nan     0.1000   -0.0000
   100        0.0030             nan     0.1000   -0.0000
   120        0.0030             nan     0.1000   -0.0000
   140        0.0029             nan     0.1000   -0.0000
   150        0.0029             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0172             nan     0.1000    0.0027
     2        0.0152             nan     0.1000    0.0019
     3        0.0134             nan     0.1000    0.0018
     4        0.0118             nan     0.1000    0.0015
     5        0.0105             nan     0.1000    0.0012
     6        0.0095             nan     0.1000    0.0010
     7        0.0087             nan     0.1000    0.0009
     8        0.0079             nan     0.1000    0.0007
     9        0.0071             nan     0.1000    0.0007
    10        0.0066             nan     0.1000    0.0004
    20        0.0039             nan     0.1000    0.0001
    40        0.0031             nan     0.1000   -0.0000
    60        0.0029             nan     0.1000   -0.0000
    80        0.0028             nan     0.1000   -0.0000
   100        0.0027             nan     0.1000   -0.0000
   120        0.0026             nan     0.1000   -0.0000
   140        0.0026             nan     0.1000   -0.0000
   150        0.0025             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0170             nan     0.1000    0.0026
     2        0.0147             nan     0.1000    0.0024
     3        0.0128             nan     0.1000    0.0019
     4        0.0113             nan     0.1000    0.0015
     5        0.0100             nan     0.1000    0.0013
     6        0.0088             nan     0.1000    0.0010
     7        0.0079             nan     0.1000    0.0008
     8        0.0071             nan     0.1000    0.0006
     9        0.0065             nan     0.1000    0.0006
    10        0.0059             nan     0.1000    0.0004
    20        0.0036             nan     0.1000    0.0001
    40        0.0030             nan     0.1000   -0.0000
    60        0.0028             nan     0.1000   -0.0000
    80        0.0027             nan     0.1000   -0.0000
   100        0.0025             nan     0.1000   -0.0000
   120        0.0024             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0022             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0172             nan     0.1000    0.0021
     2        0.0154             nan     0.1000    0.0015
     3        0.0139             nan     0.1000    0.0013
     4        0.0126             nan     0.1000    0.0012
     5        0.0116             nan     0.1000    0.0009
     6        0.0108             nan     0.1000    0.0007
     7        0.0100             nan     0.1000    0.0007
     8        0.0094             nan     0.1000    0.0006
     9        0.0088             nan     0.1000    0.0005
    10        0.0082             nan     0.1000    0.0005
    20        0.0053             nan     0.1000    0.0001
    40        0.0037             nan     0.1000    0.0000
    60        0.0033             nan     0.1000   -0.0000
    80        0.0032             nan     0.1000   -0.0000
   100        0.0032             nan     0.1000    0.0000
   120        0.0031             nan     0.1000   -0.0000
   140        0.0031             nan     0.1000   -0.0000
   150        0.0030             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0169             nan     0.1000    0.0025
     2        0.0148             nan     0.1000    0.0019
     3        0.0129             nan     0.1000    0.0018
     4        0.0115             nan     0.1000    0.0014
     5        0.0103             nan     0.1000    0.0012
     6        0.0093             nan     0.1000    0.0010
     7        0.0085             nan     0.1000    0.0008
     8        0.0077             nan     0.1000    0.0006
     9        0.0071             nan     0.1000    0.0006
    10        0.0066             nan     0.1000    0.0005
    20        0.0041             nan     0.1000    0.0001
    40        0.0032             nan     0.1000   -0.0000
    60        0.0031             nan     0.1000   -0.0000
    80        0.0029             nan     0.1000   -0.0000
   100        0.0028             nan     0.1000   -0.0000
   120        0.0028             nan     0.1000   -0.0000
   140        0.0027             nan     0.1000   -0.0000
   150        0.0027             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0166             nan     0.1000    0.0028
     2        0.0144             nan     0.1000    0.0019
     3        0.0125             nan     0.1000    0.0017
     4        0.0110             nan     0.1000    0.0015
     5        0.0098             nan     0.1000    0.0011
     6        0.0087             nan     0.1000    0.0010
     7        0.0079             nan     0.1000    0.0008
     8        0.0072             nan     0.1000    0.0007
     9        0.0067             nan     0.1000    0.0005
    10        0.0062             nan     0.1000    0.0004
    20        0.0037             nan     0.1000    0.0001
    40        0.0030             nan     0.1000   -0.0000
    60        0.0028             nan     0.1000   -0.0000
    80        0.0027             nan     0.1000   -0.0000
   100        0.0026             nan     0.1000   -0.0000
   120        0.0024             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0023             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0178             nan     0.1000    0.0020
     2        0.0162             nan     0.1000    0.0018
     3        0.0146             nan     0.1000    0.0014
     4        0.0134             nan     0.1000    0.0012
     5        0.0125             nan     0.1000    0.0010
     6        0.0115             nan     0.1000    0.0008
     7        0.0106             nan     0.1000    0.0008
     8        0.0099             nan     0.1000    0.0007
     9        0.0093             nan     0.1000    0.0006
    10        0.0087             nan     0.1000    0.0005
    20        0.0054             nan     0.1000    0.0002
    40        0.0037             nan     0.1000    0.0000
    60        0.0034             nan     0.1000   -0.0000
    80        0.0033             nan     0.1000   -0.0000
   100        0.0033             nan     0.1000   -0.0000
   120        0.0032             nan     0.1000   -0.0000
   140        0.0032             nan     0.1000   -0.0000
   150        0.0031             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0172             nan     0.1000    0.0026
     2        0.0151             nan     0.1000    0.0021
     3        0.0132             nan     0.1000    0.0019
     4        0.0118             nan     0.1000    0.0014
     5        0.0106             nan     0.1000    0.0011
     6        0.0096             nan     0.1000    0.0009
     7        0.0087             nan     0.1000    0.0007
     8        0.0079             nan     0.1000    0.0008
     9        0.0071             nan     0.1000    0.0006
    10        0.0066             nan     0.1000    0.0005
    20        0.0041             nan     0.1000    0.0001
    40        0.0032             nan     0.1000   -0.0000
    60        0.0031             nan     0.1000   -0.0000
    80        0.0030             nan     0.1000   -0.0000
   100        0.0029             nan     0.1000   -0.0000
   120        0.0028             nan     0.1000   -0.0000
   140        0.0027             nan     0.1000   -0.0000
   150        0.0027             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0170             nan     0.1000    0.0028
     2        0.0146             nan     0.1000    0.0022
     3        0.0128             nan     0.1000    0.0019
     4        0.0113             nan     0.1000    0.0015
     5        0.0100             nan     0.1000    0.0012
     6        0.0088             nan     0.1000    0.0010
     7        0.0080             nan     0.1000    0.0008
     8        0.0072             nan     0.1000    0.0007
     9        0.0066             nan     0.1000    0.0005
    10        0.0061             nan     0.1000    0.0004
    20        0.0037             nan     0.1000    0.0000
    40        0.0030             nan     0.1000   -0.0000
    60        0.0028             nan     0.1000   -0.0000
    80        0.0027             nan     0.1000   -0.0000
   100        0.0025             nan     0.1000   -0.0000
   120        0.0025             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0023             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0179             nan     0.1000    0.0020
     2        0.0162             nan     0.1000    0.0016
     3        0.0148             nan     0.1000    0.0012
     4        0.0135             nan     0.1000    0.0013
     5        0.0123             nan     0.1000    0.0011
     6        0.0114             nan     0.1000    0.0009
     7        0.0106             nan     0.1000    0.0008
     8        0.0099             nan     0.1000    0.0006
     9        0.0093             nan     0.1000    0.0005
    10        0.0086             nan     0.1000    0.0006
    20        0.0053             nan     0.1000    0.0002
    40        0.0037             nan     0.1000    0.0000
    60        0.0034             nan     0.1000   -0.0000
    80        0.0033             nan     0.1000   -0.0000
   100        0.0032             nan     0.1000   -0.0000
   120        0.0032             nan     0.1000   -0.0000
   140        0.0031             nan     0.1000   -0.0000
   150        0.0031             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0172             nan     0.1000    0.0025
     2        0.0150             nan     0.1000    0.0020
     3        0.0131             nan     0.1000    0.0018
     4        0.0117             nan     0.1000    0.0013
     5        0.0105             nan     0.1000    0.0011
     6        0.0095             nan     0.1000    0.0010
     7        0.0086             nan     0.1000    0.0007
     8        0.0078             nan     0.1000    0.0006
     9        0.0071             nan     0.1000    0.0006
    10        0.0066             nan     0.1000    0.0005
    20        0.0040             nan     0.1000    0.0001
    40        0.0032             nan     0.1000   -0.0000
    60        0.0030             nan     0.1000   -0.0000
    80        0.0029             nan     0.1000   -0.0000
   100        0.0028             nan     0.1000   -0.0000
   120        0.0027             nan     0.1000   -0.0000
   140        0.0026             nan     0.1000   -0.0000
   150        0.0026             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0172             nan     0.1000    0.0030
     2        0.0149             nan     0.1000    0.0023
     3        0.0129             nan     0.1000    0.0020
     4        0.0114             nan     0.1000    0.0016
     5        0.0101             nan     0.1000    0.0012
     6        0.0089             nan     0.1000    0.0010
     7        0.0080             nan     0.1000    0.0009
     8        0.0073             nan     0.1000    0.0007
     9        0.0066             nan     0.1000    0.0005
    10        0.0061             nan     0.1000    0.0004
    20        0.0037             nan     0.1000    0.0000
    40        0.0030             nan     0.1000   -0.0000
    60        0.0028             nan     0.1000   -0.0000
    80        0.0026             nan     0.1000   -0.0000
   100        0.0025             nan     0.1000   -0.0000
   120        0.0024             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0023             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0169             nan     0.1000    0.0018
     2        0.0152             nan     0.1000    0.0017
     3        0.0137             nan     0.1000    0.0014
     4        0.0125             nan     0.1000    0.0012
     5        0.0115             nan     0.1000    0.0010
     6        0.0107             nan     0.1000    0.0009
     7        0.0099             nan     0.1000    0.0007
     8        0.0092             nan     0.1000    0.0007
     9        0.0086             nan     0.1000    0.0005
    10        0.0080             nan     0.1000    0.0005
    20        0.0048             nan     0.1000    0.0002
    40        0.0032             nan     0.1000    0.0000
    60        0.0029             nan     0.1000    0.0000
    80        0.0028             nan     0.1000   -0.0000
   100        0.0028             nan     0.1000   -0.0000
   120        0.0027             nan     0.1000   -0.0000
   140        0.0027             nan     0.1000   -0.0000
   150        0.0027             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0163             nan     0.1000    0.0024
     2        0.0143             nan     0.1000    0.0019
     3        0.0124             nan     0.1000    0.0016
     4        0.0110             nan     0.1000    0.0015
     5        0.0100             nan     0.1000    0.0011
     6        0.0089             nan     0.1000    0.0009
     7        0.0082             nan     0.1000    0.0008
     8        0.0075             nan     0.1000    0.0008
     9        0.0069             nan     0.1000    0.0006
    10        0.0063             nan     0.1000    0.0006
    20        0.0037             nan     0.1000    0.0001
    40        0.0028             nan     0.1000    0.0000
    60        0.0026             nan     0.1000   -0.0000
    80        0.0025             nan     0.1000   -0.0000
   100        0.0024             nan     0.1000   -0.0000
   120        0.0023             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0022             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0162             nan     0.1000    0.0025
     2        0.0138             nan     0.1000    0.0026
     3        0.0120             nan     0.1000    0.0021
     4        0.0104             nan     0.1000    0.0015
     5        0.0092             nan     0.1000    0.0012
     6        0.0081             nan     0.1000    0.0009
     7        0.0073             nan     0.1000    0.0007
     8        0.0066             nan     0.1000    0.0007
     9        0.0060             nan     0.1000    0.0006
    10        0.0055             nan     0.1000    0.0005
    20        0.0032             nan     0.1000    0.0001
    40        0.0025             nan     0.1000   -0.0000
    60        0.0024             nan     0.1000   -0.0000
    80        0.0023             nan     0.1000   -0.0000
   100        0.0021             nan     0.1000   -0.0000
   120        0.0020             nan     0.1000   -0.0000
   140        0.0019             nan     0.1000   -0.0000
   150        0.0019             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0177             nan     0.1000    0.0020
     2        0.0160             nan     0.1000    0.0017
     3        0.0145             nan     0.1000    0.0014
     4        0.0134             nan     0.1000    0.0012
     5        0.0124             nan     0.1000    0.0009
     6        0.0114             nan     0.1000    0.0009
     7        0.0105             nan     0.1000    0.0007
     8        0.0097             nan     0.1000    0.0008
     9        0.0092             nan     0.1000    0.0006
    10        0.0086             nan     0.1000    0.0005
    20        0.0055             nan     0.1000    0.0001
    40        0.0037             nan     0.1000    0.0000
    60        0.0034             nan     0.1000   -0.0000
    80        0.0033             nan     0.1000   -0.0000
   100        0.0032             nan     0.1000   -0.0000
   120        0.0032             nan     0.1000   -0.0000
   140        0.0032             nan     0.1000   -0.0000
   150        0.0031             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0172             nan     0.1000    0.0025
     2        0.0152             nan     0.1000    0.0020
     3        0.0133             nan     0.1000    0.0017
     4        0.0118             nan     0.1000    0.0013
     5        0.0106             nan     0.1000    0.0012
     6        0.0095             nan     0.1000    0.0010
     7        0.0086             nan     0.1000    0.0007
     8        0.0078             nan     0.1000    0.0006
     9        0.0072             nan     0.1000    0.0006
    10        0.0067             nan     0.1000    0.0005
    20        0.0040             nan     0.1000    0.0001
    40        0.0032             nan     0.1000    0.0000
    60        0.0031             nan     0.1000   -0.0000
    80        0.0029             nan     0.1000   -0.0000
   100        0.0028             nan     0.1000   -0.0000
   120        0.0027             nan     0.1000   -0.0000
   140        0.0027             nan     0.1000   -0.0000
   150        0.0026             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0168             nan     0.1000    0.0028
     2        0.0147             nan     0.1000    0.0024
     3        0.0130             nan     0.1000    0.0015
     4        0.0115             nan     0.1000    0.0017
     5        0.0101             nan     0.1000    0.0012
     6        0.0090             nan     0.1000    0.0010
     7        0.0082             nan     0.1000    0.0008
     8        0.0074             nan     0.1000    0.0007
     9        0.0067             nan     0.1000    0.0005
    10        0.0062             nan     0.1000    0.0005
    20        0.0038             nan     0.1000    0.0001
    40        0.0031             nan     0.1000   -0.0000
    60        0.0029             nan     0.1000   -0.0000
    80        0.0028             nan     0.1000   -0.0000
   100        0.0026             nan     0.1000   -0.0000
   120        0.0025             nan     0.1000   -0.0000
   140        0.0024             nan     0.1000   -0.0000
   150        0.0023             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0171             nan     0.1000    0.0020
     2        0.0154             nan     0.1000    0.0016
     3        0.0139             nan     0.1000    0.0013
     4        0.0127             nan     0.1000    0.0012
     5        0.0118             nan     0.1000    0.0008
     6        0.0108             nan     0.1000    0.0009
     7        0.0100             nan     0.1000    0.0008
     8        0.0093             nan     0.1000    0.0005
     9        0.0086             nan     0.1000    0.0006
    10        0.0080             nan     0.1000    0.0005
    20        0.0050             nan     0.1000    0.0002
    40        0.0035             nan     0.1000    0.0000
    60        0.0032             nan     0.1000   -0.0000
    80        0.0031             nan     0.1000   -0.0000
   100        0.0031             nan     0.1000   -0.0000
   120        0.0030             nan     0.1000   -0.0000
   140        0.0030             nan     0.1000   -0.0000
   150        0.0030             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0168             nan     0.1000    0.0023
     2        0.0148             nan     0.1000    0.0018
     3        0.0131             nan     0.1000    0.0017
     4        0.0117             nan     0.1000    0.0013
     5        0.0104             nan     0.1000    0.0013
     6        0.0094             nan     0.1000    0.0009
     7        0.0084             nan     0.1000    0.0009
     8        0.0076             nan     0.1000    0.0009
     9        0.0070             nan     0.1000    0.0006
    10        0.0064             nan     0.1000    0.0005
    20        0.0039             nan     0.1000    0.0001
    40        0.0031             nan     0.1000   -0.0000
    60        0.0030             nan     0.1000   -0.0000
    80        0.0028             nan     0.1000   -0.0000
   100        0.0027             nan     0.1000   -0.0000
   120        0.0027             nan     0.1000   -0.0000
   140        0.0026             nan     0.1000   -0.0000
   150        0.0026             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0165             nan     0.1000    0.0028
     2        0.0142             nan     0.1000    0.0020
     3        0.0123             nan     0.1000    0.0021
     4        0.0110             nan     0.1000    0.0014
     5        0.0097             nan     0.1000    0.0012
     6        0.0086             nan     0.1000    0.0010
     7        0.0076             nan     0.1000    0.0008
     8        0.0069             nan     0.1000    0.0007
     9        0.0064             nan     0.1000    0.0006
    10        0.0059             nan     0.1000    0.0005
    20        0.0036             nan     0.1000    0.0001
    40        0.0029             nan     0.1000   -0.0000
    60        0.0027             nan     0.1000   -0.0000
    80        0.0026             nan     0.1000   -0.0000
   100        0.0025             nan     0.1000   -0.0000
   120        0.0024             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0022             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0170             nan     0.1000    0.0020
     2        0.0154             nan     0.1000    0.0015
     3        0.0139             nan     0.1000    0.0014
     4        0.0128             nan     0.1000    0.0012
     5        0.0117             nan     0.1000    0.0010
     6        0.0108             nan     0.1000    0.0008
     7        0.0100             nan     0.1000    0.0007
     8        0.0092             nan     0.1000    0.0006
     9        0.0086             nan     0.1000    0.0006
    10        0.0081             nan     0.1000    0.0005
    20        0.0051             nan     0.1000    0.0002
    40        0.0036             nan     0.1000    0.0000
    60        0.0033             nan     0.1000   -0.0000
    80        0.0032             nan     0.1000    0.0000
   100        0.0031             nan     0.1000   -0.0000
   120        0.0031             nan     0.1000   -0.0000
   140        0.0030             nan     0.1000   -0.0000
   150        0.0030             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0167             nan     0.1000    0.0022
     2        0.0146             nan     0.1000    0.0019
     3        0.0128             nan     0.1000    0.0016
     4        0.0115             nan     0.1000    0.0012
     5        0.0103             nan     0.1000    0.0012
     6        0.0092             nan     0.1000    0.0010
     7        0.0084             nan     0.1000    0.0008
     8        0.0076             nan     0.1000    0.0007
     9        0.0070             nan     0.1000    0.0006
    10        0.0066             nan     0.1000    0.0004
    20        0.0041             nan     0.1000    0.0001
    40        0.0032             nan     0.1000   -0.0000
    60        0.0031             nan     0.1000   -0.0000
    80        0.0029             nan     0.1000   -0.0000
   100        0.0028             nan     0.1000   -0.0000
   120        0.0027             nan     0.1000   -0.0000
   140        0.0026             nan     0.1000   -0.0000
   150        0.0026             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0165             nan     0.1000    0.0024
     2        0.0142             nan     0.1000    0.0022
     3        0.0124             nan     0.1000    0.0018
     4        0.0108             nan     0.1000    0.0015
     5        0.0095             nan     0.1000    0.0013
     6        0.0086             nan     0.1000    0.0009
     7        0.0078             nan     0.1000    0.0008
     8        0.0071             nan     0.1000    0.0008
     9        0.0065             nan     0.1000    0.0006
    10        0.0060             nan     0.1000    0.0004
    20        0.0037             nan     0.1000    0.0001
    40        0.0030             nan     0.1000   -0.0000
    60        0.0028             nan     0.1000   -0.0000
    80        0.0026             nan     0.1000   -0.0000
   100        0.0025             nan     0.1000   -0.0000
   120        0.0024             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0022             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0180             nan     0.1000    0.0019
     2        0.0161             nan     0.1000    0.0018
     3        0.0147             nan     0.1000    0.0015
     4        0.0133             nan     0.1000    0.0013
     5        0.0123             nan     0.1000    0.0009
     6        0.0114             nan     0.1000    0.0011
     7        0.0105             nan     0.1000    0.0009
     8        0.0097             nan     0.1000    0.0007
     9        0.0090             nan     0.1000    0.0006
    10        0.0084             nan     0.1000    0.0006
    20        0.0052             nan     0.1000    0.0002
    40        0.0035             nan     0.1000    0.0000
    60        0.0032             nan     0.1000   -0.0000
    80        0.0031             nan     0.1000   -0.0000
   100        0.0031             nan     0.1000   -0.0000
   120        0.0030             nan     0.1000   -0.0000
   140        0.0030             nan     0.1000   -0.0000
   150        0.0030             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0174             nan     0.1000    0.0024
     2        0.0153             nan     0.1000    0.0019
     3        0.0136             nan     0.1000    0.0017
     4        0.0122             nan     0.1000    0.0015
     5        0.0110             nan     0.1000    0.0011
     6        0.0098             nan     0.1000    0.0011
     7        0.0089             nan     0.1000    0.0009
     8        0.0081             nan     0.1000    0.0008
     9        0.0073             nan     0.1000    0.0007
    10        0.0068             nan     0.1000    0.0006
    20        0.0040             nan     0.1000    0.0001
    40        0.0031             nan     0.1000    0.0000
    60        0.0029             nan     0.1000   -0.0000
    80        0.0028             nan     0.1000   -0.0000
   100        0.0027             nan     0.1000   -0.0000
   120        0.0026             nan     0.1000   -0.0000
   140        0.0026             nan     0.1000   -0.0000
   150        0.0025             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0171             nan     0.1000    0.0030
     2        0.0148             nan     0.1000    0.0023
     3        0.0128             nan     0.1000    0.0019
     4        0.0112             nan     0.1000    0.0016
     5        0.0099             nan     0.1000    0.0012
     6        0.0089             nan     0.1000    0.0010
     7        0.0080             nan     0.1000    0.0009
     8        0.0073             nan     0.1000    0.0007
     9        0.0067             nan     0.1000    0.0006
    10        0.0060             nan     0.1000    0.0005
    20        0.0037             nan     0.1000    0.0001
    40        0.0030             nan     0.1000   -0.0000
    60        0.0028             nan     0.1000   -0.0000
    80        0.0026             nan     0.1000   -0.0000
   100        0.0025             nan     0.1000   -0.0000
   120        0.0024             nan     0.1000   -0.0000
   140        0.0023             nan     0.1000   -0.0000
   150        0.0023             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0174             nan     0.1000    0.0020
     2        0.0156             nan     0.1000    0.0016
     3        0.0141             nan     0.1000    0.0015
     4        0.0130             nan     0.1000    0.0012
     5        0.0120             nan     0.1000    0.0008
     6        0.0110             nan     0.1000    0.0009
     7        0.0103             nan     0.1000    0.0007
     8        0.0095             nan     0.1000    0.0007
     9        0.0089             nan     0.1000    0.0006
    10        0.0083             nan     0.1000    0.0004
    20        0.0052             nan     0.1000    0.0002
    40        0.0035             nan     0.1000    0.0000
    60        0.0031             nan     0.1000   -0.0000
    80        0.0030             nan     0.1000   -0.0000
   100        0.0030             nan     0.1000   -0.0000
   120        0.0029             nan     0.1000   -0.0000
   140        0.0029             nan     0.1000   -0.0000
   150        0.0029             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0170             nan     0.1000    0.0024
     2        0.0148             nan     0.1000    0.0020
     3        0.0131             nan     0.1000    0.0017
     4        0.0115             nan     0.1000    0.0014
     5        0.0103             nan     0.1000    0.0012
     6        0.0093             nan     0.1000    0.0009
     7        0.0084             nan     0.1000    0.0007
     8        0.0077             nan     0.1000    0.0007
     9        0.0070             nan     0.1000    0.0005
    10        0.0064             nan     0.1000    0.0005
    20        0.0039             nan     0.1000    0.0001
    40        0.0030             nan     0.1000   -0.0000
    60        0.0028             nan     0.1000   -0.0000
    80        0.0027             nan     0.1000   -0.0000
   100        0.0026             nan     0.1000   -0.0000
   120        0.0025             nan     0.1000   -0.0000
   140        0.0025             nan     0.1000   -0.0000
   150        0.0025             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0166             nan     0.1000    0.0030
     2        0.0143             nan     0.1000    0.0023
     3        0.0124             nan     0.1000    0.0018
     4        0.0107             nan     0.1000    0.0016
     5        0.0094             nan     0.1000    0.0012
     6        0.0084             nan     0.1000    0.0010
     7        0.0076             nan     0.1000    0.0008
     8        0.0069             nan     0.1000    0.0007
     9        0.0063             nan     0.1000    0.0004
    10        0.0058             nan     0.1000    0.0004
    20        0.0035             nan     0.1000    0.0001
    40        0.0028             nan     0.1000   -0.0000
    60        0.0026             nan     0.1000   -0.0000
    80        0.0025             nan     0.1000   -0.0000
   100        0.0024             nan     0.1000   -0.0000
   120        0.0023             nan     0.1000   -0.0000
   140        0.0022             nan     0.1000   -0.0000
   150        0.0021             nan     0.1000   -0.0000

Iter   TrainDeviance   ValidDeviance   StepSize   Improve
     1        0.0174             nan     0.1000    0.0020
     2        0.0157             nan     0.1000    0.0016
     3        0.0143             nan     0.1000    0.0013
     4        0.0131             nan     0.1000    0.0013
     5        0.0120             nan     0.1000    0.0009
     6        0.0110             nan     0.1000    0.0010
     7        0.0102             nan     0.1000    0.0009
     8        0.0095             nan     0.1000    0.0007
     9        0.0089             nan     0.1000    0.0006
    10        0.0083             nan     0.1000    0.0004
    20        0.0052             nan     0.1000    0.0002
    40        0.0035             nan     0.1000    0.0000
    60        0.0033             nan     0.1000    0.0000
    80        0.0032             nan     0.1000    0.0000
   100        0.0031             nan     0.1000   -0.0000
# Predict admission chance on the held-out test set using the fitted
# gradient-boosting model (gbm, trained earlier in this session).
# Idiom fix: use `<-` for assignment rather than `=`.
# NOTE(review): variable keeps the original "gradiant" spelling because a
# later RMSE call in this transcript references it by that name.
predictions_gradiant <- predict(gbm, grad_test)
predictions_gradiant
 [1] 0.7312512 0.6110841 0.6447126 0.5918961 0.9390864 0.4994490 0.9343960 0.8764823
 [9] 0.6642352 0.8665650 0.8496557 0.9299986 0.8025594 0.6674379 0.6635047 0.5310443
[17] 0.7958246 0.9277737 0.6703198 0.5180410 0.7502517 0.9248009 0.7616795 0.5067965
[25] 0.6649484 0.7486663 0.6986623 0.9300729 0.9211120 0.7892452 0.5843112 0.4808641
[33] 0.8319404 0.7411639 0.9301122 0.8636376 0.6701223 0.7119825 0.9390864 0.9355153
[41] 0.9251341 0.8086447 0.6287214 0.5737942 0.6884362 0.6745289 0.7900782 0.6929036
[49] 0.9204437 0.5303182 0.7182076 0.7186729 0.7486115 0.6908829 0.5963963 0.7027322
[57] 0.9291132 0.7688524 0.7048672 0.7373202 0.7288994 0.6698136 0.7741349 0.6313808
[65] 0.5785156 0.7486115 0.5105983 0.7214079 0.8519996 0.7761120 0.7626098 0.6834249
[73] 0.7763902 0.4497845 0.4749503 0.6238822 0.5367748 0.7442117 0.7157479 0.5518503
[81] 0.6571955 0.6481045 0.7176188 0.7350897 0.7760342 0.5533770 0.8989154 0.8834537
[89] 0.7090324 0.6743371 0.9192616 0.8834633 0.7673264 0.4869443 0.6548539 0.6699033
[97] 0.6008233 0.6347723
# Test-set RMSE of the gradient-boosting predictions vs. the true outcome.
RMSE(predictions_gradiant,grad_test$Chance.of.Admit)
[1] 0.06506031
# Fit a linear-kernel SVM regression for Chance.of.Admit with caret,
# using 10-fold cross-validation. Preprocessing: kNN imputation plus
# removal of near-zero-variance predictors.
set.seed(1)

svmln <- train(
  Chance.of.Admit ~ .,
  data = grad_train,
  method = "svmLinear",
  preProcess = c("knnImpute", "nzv"),
  trControl = trainControl("cv", number = 10)
)
svmln
Support Vector Machines with Linear Kernel 

402 samples
  7 predictor

Pre-processing: nearest neighbor imputation (7), centered (7), scaled (7) 
Resampling: Cross-Validated (10 fold) 
Summary of sample sizes: 362, 362, 362, 362, 361, 362, ... 
Resampling results:

  RMSE        Rsquared   MAE       
  0.05890364  0.8252621  0.04195713

Tuning parameter 'C' was held constant at a value of 1
 
# Predictions from the linear-kernel SVM on the held-out test set.
# Idiom fix: `<-` for assignment instead of `=`; tidy argument spacing.
predict_svm1 <- predict(svmln, grad_test)
predict_svm1
       11        14        15        21        26        30        34        36        42 
0.7349442 0.6520435 0.6561656 0.6301389 0.9658118 0.5008429 0.9420740 0.8601386 0.6673583 
       44        46        48        49        51        55        57        66        85 
0.8470459 0.8391209 0.9438762 0.8065938 0.6848706 0.6634051 0.5506980 0.7922637 0.9330774 
       88        95       106       108       112       119       125       136       137 
0.6677168 0.5449851 0.7834631 0.9302013 0.7771078 0.4931952 0.6726375 0.7758820 0.6870078 
      149       151       155       161       162       165       184       188       191 
0.9580015 0.9049063 0.7723407 0.5827031 0.5057015 0.8367839 0.7530224 0.9196749 0.8526302 
      198       200       203       213       216       219       225       226       227 
0.6526761 0.7354053 1.0033895 0.9696718 0.9080088 0.8139394 0.6227785 0.5716675 0.6904895 
      228       230       233       235       240       252       258       264       266 
0.6981628 0.8155326 0.6638609 0.8870511 0.5501220 0.7297737 0.7572985 0.7640040 0.6946482 
      275       280       285       289       297       302       304       310       313 
0.5911577 0.6976023 0.9436592 0.7810334 0.7153855 0.7320279 0.7464356 0.7122826 0.7991737 
      316       318       319       330       334       336       339       340       341 
0.6178487 0.5695317 0.7713820 0.5353522 0.7411521 0.8480899 0.7911595 0.7801596 0.7103530 
      342       345       347       350       359       361       374       375       384 
0.7857762 0.4711919 0.5259291 0.6203239 0.5666108 0.7829658 0.7413324 0.5755968 0.6280245 
      388       394       399       403       409       426       430       439       440 
0.6287335 0.7170608 0.7266800 0.7950836 0.5834713 0.9479870 0.8859121 0.7326386 0.6746860 
      443       452       454       458       472       474       485       495 
0.9130612 0.8692732 0.7593244 0.4954942 0.6409913 0.6419647 0.6470477 0.6545195 
# Test-set RMSE of the linear-kernel SVM predictions.
RMSE(predict_svm1,grad_test$Chance.of.Admit) 
[1] 0.06444613
# Fit a radial-basis-function (RBF) kernel SVM with caret and 10-fold CV.
# Same preprocessing (kNN imputation + near-zero-variance filter) as the
# linear SVM above, so the two models are directly comparable.
set.seed(1)

svmr <- train(
  Chance.of.Admit ~ .,
  data = grad_train,
  method = "svmRadial",
  preProcess = c("knnImpute", "nzv"),
  trControl = trainControl("cv", number = 10)
)

svmr
Support Vector Machines with Radial Basis Function Kernel 

402 samples
  7 predictor

Pre-processing: nearest neighbor imputation (7), centered (7), scaled (7) 
Resampling: Cross-Validated (10 fold) 
Summary of sample sizes: 362, 362, 362, 362, 361, 362, ... 
Resampling results across tuning parameters:

  C     RMSE        Rsquared   MAE       
  0.25  0.06436491  0.7998396  0.04603864
  0.50  0.06266779  0.8073450  0.04499046
  1.00  0.06198749  0.8084937  0.04445152

Tuning parameter 'sigma' was held constant at a value of 0.1510433
RMSE was used to select the optimal model using the smallest value.
The final values used for the model were sigma = 0.1510433 and C = 1.
# Predictions from the radial-kernel SVM on the held-out test set.
predict_svmrad <- predict(svmr, grad_test)
predict_svmrad
 [1] 0.7396355 0.6672856 0.6814889 0.6556478 0.9572348 0.4863187 0.9353423 0.8818715
 [9] 0.6783271 0.8181653 0.8585198 0.8396567 0.7789740 0.6697860 0.6668925 0.5700981
[17] 0.7517691 0.9431133 0.6469219 0.5099384 0.7555353 0.9225318 0.7766807 0.5159532
[25] 0.6306923 0.7727081 0.6783812 0.9430524 0.9203955 0.7531660 0.5944751 0.5557183
[33] 0.8428088 0.7633287 0.9379883 0.8757310 0.6340714 0.7581087 0.9612753 0.9380781
[41] 0.9303201 0.7979897 0.6294792 0.5959151 0.6795949 0.6796421 0.8010740 0.6863689
[49] 0.9121789 0.5459792 0.7316879 0.7356187 0.7604338 0.7015158 0.6109415 0.7091401
[57] 0.9275310 0.7941443 0.7275233 0.7456368 0.7490951 0.6919584 0.7832075 0.6411286
[65] 0.5889180 0.7738484 0.5251196 0.7526566 0.8518429 0.7978877 0.7711083 0.7371695
[73] 0.7786229 0.4470473 0.4934964 0.6340413 0.5583649 0.7571265 0.7540902 0.5907358
[81] 0.6351664 0.6508042 0.7289639 0.7511650 0.7918290 0.5441851 0.9220671 0.8915359
[89] 0.7311670 0.7003926 0.9333385 0.8791524 0.7500655 0.4763592 0.6348940 0.6300875
[97] 0.6717996 0.6613583
# Test-set RMSE of the radial-kernel SVM predictions.
RMSE(predict_svmrad,grad_test$Chance.of.Admit)
[1] 0.06292008
 
# Collect the cross-validation resampling results of all four models
# (random forest, gbm, linear SVM, radial SVM) for side-by-side comparison.
# Idiom fix: `<-` for top-level assignment; spaces after commas.
# The list labels (Ran, G, SL, SR) are kept unchanged so the printed
# summary rows match the rest of the transcript.
compare <- resamples(list(Ran = grad_rf, G = gbm, SL = svmln, SR = svmr))
summary(compare)

Call:
summary.resamples(object = compare)

Models: Ran, G, SL, SR 
Number of resamples: 10 

MAE 
          Min.    1st Qu.     Median       Mean    3rd Qu.       Max. NA's
Ran 0.03603111 0.03861066 0.04327403 0.04372467 0.04710168 0.05633216    0
G   0.03678734 0.04003845 0.04299143 0.04402362 0.04651909 0.05689596    0
SL  0.03280300 0.03822735 0.04154175 0.04195713 0.04552548 0.05597608    0
SR  0.03192950 0.04200472 0.04535241 0.04445152 0.04709136 0.05536018    0

RMSE 
          Min.    1st Qu.     Median       Mean    3rd Qu.       Max. NA's
Ran 0.04609548 0.05560320 0.06055004 0.06113433 0.06405183 0.08654964    0
G   0.04944747 0.05513778 0.05967615 0.06136928 0.06462387 0.08609828    0
SL  0.04252668 0.05108978 0.05851435 0.05890364 0.06448775 0.08524598    0
SR  0.04221698 0.05684396 0.06209856 0.06198749 0.06674123 0.08362664    0

Rsquared 
         Min.   1st Qu.    Median      Mean   3rd Qu.      Max. NA's
Ran 0.7088202 0.7875525 0.8058082 0.8106544 0.8386090 0.8834864    0
G   0.7165030 0.7749421 0.8158588 0.8089640 0.8452834 0.8675045    0
SL  0.7375898 0.7916795 0.8261871 0.8252621 0.8470012 0.9092588    0
SR  0.7367415 0.7873276 0.8027157 0.8084937 0.8187553 0.8895089    0
set.seed(1)

# Carve a 90/10 train/validation split out of the training data, stratified
# on the outcome, for tuning the neural network below.
# (Fix: use `<-` for assignment rather than `=`, per R convention.)
inTrain <- createDataPartition(grad_train$Chance.of.Admit, p = 0.9, list = FALSE)
grad_90_train <- grad_train[inTrain, ]
grad_val <- grad_train[-inTrain, ]
str(grad_90_train)
'data.frame':   362 obs. of  8 variables:
 $ GRE.Score        : int  337 324 316 322 314 330 308 302 323 327 ...
 $ TOEFL.Score      : int  118 107 104 110 103 115 101 102 108 111 ...
 $ University.Rating: int  4 4 3 3 2 5 2 1 3 4 ...
 $ SOP              : num  4.5 4 3 3.5 2 4.5 3 2 3.5 4 ...
 $ LOR              : num  4.5 4.5 3.5 2.5 3 3 4 1.5 3 4.5 ...
 $ CGPA             : num  9.65 8.87 8 8.67 8.21 9.34 7.9 8 8.6 9 ...
 $ Research         : Factor w/ 2 levels "0","1": 2 2 2 2 1 2 1 1 1 2 ...
 $ Chance.of.Admit  : num  0.92 0.76 0.72 0.8 0.65 0.9 0.68 0.5 0.45 0.84 ...
set.seed(1)

# Split predictors (x) from the outcome (y). Select by column name instead of
# the hard-coded position 8, so a reordered data frame upstream cannot
# silently swap in the wrong outcome column.
outcome <- "Chance.of.Admit"
grad_train1x <- grad_90_train[, setdiff(names(grad_90_train), outcome)]
grad_train1y <- grad_90_train[[outcome]]
grad_test1x  <- grad_test[, setdiff(names(grad_test), outcome)]
grad_test1y  <- grad_test[[outcome]]
grad_valx    <- grad_val[, setdiff(names(grad_val), outcome)]
grad_valy    <- grad_val[[outcome]]
set.seed(1)
# Fit knn-imputation on the TRAINING predictors only (it also centers/scales
# the numeric columns, per the earlier caret pre-processing summary), so no
# test/validation information leaks into the transform.
preproc <- preProcess(grad_train1x, method = "knnImpute")

# Apply the training-set preprocessing transform to every split.
train.imputed <- predict(preproc, grad_train1x)
test.imputed  <- predict(preproc, grad_test1x)
val_imputed   <- predict(preproc, grad_valx)

# Aliases used by the rest of the pipeline.
new_train <- train.imputed
new_test  <- test.imputed
new_val   <- val_imputed
print(new_train)
print(new_test)
print(new_val)
set.seed(1)
library(mltools)
library(data.table)
data.table 1.14.6 using 6 threads (see ?getDTthreads).  Latest news: r-datatable.com
# One-hot encode factor columns (here the Research factor) so each split can
# be fed to keras as a purely numeric matrix.
# (Fix: the identical one_hot() call was duplicated three times; factor it
# into a helper so the encoding settings cannot drift between splits.)
encode_one_hot <- function(df) {
  as.data.frame(
    one_hot(data.table(df), cols = "auto", sparsifyNAs = FALSE,
            naCols = FALSE, dropCols = TRUE, dropUnusedLevels = FALSE)
  )
}

new_train_trim <- encode_one_hot(new_train)
new_test_trim  <- encode_one_hot(new_test)
new_val_trim   <- encode_one_hot(new_val)
new_train_trim
new_test_trim
new_val_trim
# Load keras/tensorflow for the neural-network regression model.
library(keras)
library(tensorflow)

Attaching package: ‘tensorflow’

The following object is masked from ‘package:caret’:

    train
# Re-attach caret so caret's train() is found ahead of tensorflow's
# (tensorflow masked caret::train, per the message above).
library(caret) 
set.seed(1)

# Start an empty sequential (layer-stack) keras model; layers added below.
model <- keras_model_sequential() 
2022-12-06 20:22:17.784810: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cudart64_110.dll'; dlerror: cudart64_110.dll not found
2022-12-06 20:22:17.785231: I tensorflow/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.
Loaded Tensorflow version 2.9.3
2022-12-06 20:22:43.920974: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cudart64_110.dll'; dlerror: cudart64_110.dll not found
2022-12-06 20:22:43.921572: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cublas64_11.dll'; dlerror: cublas64_11.dll not found
2022-12-06 20:22:43.922152: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cublasLt64_11.dll'; dlerror: cublasLt64_11.dll not found
2022-12-06 20:22:43.922707: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cufft64_10.dll'; dlerror: cufft64_10.dll not found
2022-12-06 20:22:43.923265: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'curand64_10.dll'; dlerror: curand64_10.dll not found
2022-12-06 20:22:43.923826: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cusolver64_11.dll'; dlerror: cusolver64_11.dll not found
2022-12-06 20:22:43.924472: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cusparse64_11.dll'; dlerror: cusparse64_11.dll not found
2022-12-06 20:22:43.925157: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'cudnn64_8.dll'; dlerror: cudnn64_8.dll not found
2022-12-06 20:22:43.925495: W tensorflow/core/common_runtime/gpu/gpu_device.cc:1850] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.
Skipping registering GPU devices...
2022-12-06 20:22:43.928882: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX AVX2
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
# Feed-forward regression network: two hidden ReLU layers of 128 units, each
# followed by 20% dropout, and a single linear output unit for the
# admission-chance target.
# (Fix: `ncol(x)` is the idiomatic equivalent of `dim(x)[2]`.)
model %>%
  layer_dense(units = 128, activation = "relu",
              input_shape = ncol(new_train_trim)) %>%
  layer_dropout(0.2) %>%
  layer_dense(units = 128, activation = "relu") %>%
  layer_dropout(0.2) %>%
  layer_dense(units = 1)

model
Model: "sequential"
__________________________________________________________________________________________
 Layer (type)                           Output Shape                        Param #       
==========================================================================================
 dense_2 (Dense)                        (None, 128)                         1152          
 dropout_1 (Dropout)                    (None, 128)                         0             
 dense_1 (Dense)                        (None, 128)                         16512         
 dropout (Dropout)                      (None, 128)                         0             
 dense (Dense)                          (None, 1)                           129           
==========================================================================================
Total params: 17,793
Trainable params: 17,793
Non-trainable params: 0
__________________________________________________________________________________________
set.seed(1)
# Configure training: plain SGD optimizer, mean-squared-error loss, and mean
# absolute error tracked as an extra metric.
model %>%
  compile(
    optimizer = "sgd",
    loss = "mse",
    metrics = list("mae")
  )

model
Model: "sequential"
__________________________________________________________________________________________
 Layer (type)                           Output Shape                        Param #       
==========================================================================================
 dense_2 (Dense)                        (None, 128)                         1152          
 dropout_1 (Dropout)                    (None, 128)                         0             
 dense_1 (Dense)                        (None, 128)                         16512         
 dropout (Dropout)                      (None, 128)                         0             
 dense (Dense)                          (None, 1)                           129           
==========================================================================================
Total params: 17,793
Trainable params: 17,793
Non-trainable params: 0
__________________________________________________________________________________________
set.seed(111)
# Train for 30 epochs in batches of 100, reporting loss/MAE on the held-out
# validation split after every epoch.
model %>%
  fit(
    as.matrix(new_train_trim),
    grad_train1y,
    epochs = 30,
    batch_size = 100,
    validation_data = list(as.matrix(new_val_trim), grad_valy)
  )
Epoch 1/30

1/4 [======>.......................] - ETA: 1s - loss: 0.9755 - mae: 0.9515
4/4 [==============================] - 1s 4ms/step - loss: 0.6822 - mae: 0.7777

4/4 [==============================] - 2s 433ms/step - loss: 0.6822 - mae: 0.7777 - val_loss: 0.2994 - val_mae: 0.5229
Epoch 2/30

1/4 [======>.......................] - ETA: 0s - loss: 0.4005 - mae: 0.5781
4/4 [==============================] - 0s 3ms/step - loss: 0.2858 - mae: 0.4850

4/4 [==============================] - 0s 58ms/step - loss: 0.2858 - mae: 0.4850 - val_loss: 0.1234 - val_mae: 0.3256
Epoch 3/30

1/4 [======>.......................] - ETA: 0s - loss: 0.1804 - mae: 0.3821
4/4 [==============================] - 0s 3ms/step - loss: 0.1416 - mae: 0.3314

4/4 [==============================] - 0s 51ms/step - loss: 0.1416 - mae: 0.3314 - val_loss: 0.0701 - val_mae: 0.2320
Epoch 4/30

1/4 [======>.......................] - ETA: 0s - loss: 0.1260 - mae: 0.3110
4/4 [==============================] - 0s 3ms/step - loss: 0.1191 - mae: 0.2902

4/4 [==============================] - 0s 51ms/step - loss: 0.1191 - mae: 0.2902 - val_loss: 0.0515 - val_mae: 0.1953
Epoch 5/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0892 - mae: 0.2456
4/4 [==============================] - 0s 4ms/step - loss: 0.0843 - mae: 0.2407

4/4 [==============================] - 0s 55ms/step - loss: 0.0843 - mae: 0.2407 - val_loss: 0.0436 - val_mae: 0.1780
Epoch 6/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0889 - mae: 0.2486
4/4 [==============================] - 0s 4ms/step - loss: 0.0817 - mae: 0.2349

4/4 [==============================] - 0s 56ms/step - loss: 0.0817 - mae: 0.2349 - val_loss: 0.0380 - val_mae: 0.1628
Epoch 7/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0825 - mae: 0.2260
4/4 [==============================] - 0s 3ms/step - loss: 0.0820 - mae: 0.2267

4/4 [==============================] - 0s 51ms/step - loss: 0.0820 - mae: 0.2267 - val_loss: 0.0355 - val_mae: 0.1561
Epoch 8/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0748 - mae: 0.2268
4/4 [==============================] - 0s 4ms/step - loss: 0.0655 - mae: 0.2088

4/4 [==============================] - 0s 56ms/step - loss: 0.0655 - mae: 0.2088 - val_loss: 0.0337 - val_mae: 0.1497
Epoch 9/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0673 - mae: 0.2035
4/4 [==============================] - 0s 3ms/step - loss: 0.0676 - mae: 0.2087

4/4 [==============================] - 0s 51ms/step - loss: 0.0676 - mae: 0.2087 - val_loss: 0.0313 - val_mae: 0.1455
Epoch 10/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0497 - mae: 0.1877
4/4 [==============================] - 0s 4ms/step - loss: 0.0616 - mae: 0.2049

4/4 [==============================] - 0s 51ms/step - loss: 0.0616 - mae: 0.2049 - val_loss: 0.0295 - val_mae: 0.1401
Epoch 11/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0910 - mae: 0.2324
4/4 [==============================] - 0s 4ms/step - loss: 0.0736 - mae: 0.2134

4/4 [==============================] - 0s 52ms/step - loss: 0.0736 - mae: 0.2134 - val_loss: 0.0277 - val_mae: 0.1357
Epoch 12/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0657 - mae: 0.2077
4/4 [==============================] - 0s 3ms/step - loss: 0.0599 - mae: 0.1985

4/4 [==============================] - 0s 51ms/step - loss: 0.0599 - mae: 0.1985 - val_loss: 0.0265 - val_mae: 0.1331
Epoch 13/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0526 - mae: 0.1795
4/4 [==============================] - 0s 3ms/step - loss: 0.0695 - mae: 0.2079

4/4 [==============================] - 0s 56ms/step - loss: 0.0695 - mae: 0.2079 - val_loss: 0.0252 - val_mae: 0.1300
Epoch 14/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0665 - mae: 0.1982
4/4 [==============================] - 0s 3ms/step - loss: 0.0650 - mae: 0.2051

4/4 [==============================] - 0s 51ms/step - loss: 0.0650 - mae: 0.2051 - val_loss: 0.0238 - val_mae: 0.1242
Epoch 15/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0547 - mae: 0.1839
4/4 [==============================] - 0s 3ms/step - loss: 0.0572 - mae: 0.1928

4/4 [==============================] - 0s 51ms/step - loss: 0.0572 - mae: 0.1928 - val_loss: 0.0226 - val_mae: 0.1212
Epoch 16/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0497 - mae: 0.1864
4/4 [==============================] - 0s 3ms/step - loss: 0.0576 - mae: 0.1930

4/4 [==============================] - 0s 51ms/step - loss: 0.0576 - mae: 0.1930 - val_loss: 0.0214 - val_mae: 0.1170
Epoch 17/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0662 - mae: 0.2108
4/4 [==============================] - 0s 3ms/step - loss: 0.0574 - mae: 0.1933

4/4 [==============================] - 0s 51ms/step - loss: 0.0574 - mae: 0.1933 - val_loss: 0.0208 - val_mae: 0.1146
Epoch 18/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0506 - mae: 0.1814
4/4 [==============================] - 0s 3ms/step - loss: 0.0578 - mae: 0.1888

4/4 [==============================] - 0s 51ms/step - loss: 0.0578 - mae: 0.1888 - val_loss: 0.0200 - val_mae: 0.1128
Epoch 19/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0443 - mae: 0.1715
4/4 [==============================] - 0s 4ms/step - loss: 0.0446 - mae: 0.1699

4/4 [==============================] - 0s 55ms/step - loss: 0.0446 - mae: 0.1699 - val_loss: 0.0194 - val_mae: 0.1112
Epoch 20/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0513 - mae: 0.1827
4/4 [==============================] - 0s 4ms/step - loss: 0.0467 - mae: 0.1702

4/4 [==============================] - 0s 56ms/step - loss: 0.0467 - mae: 0.1702 - val_loss: 0.0188 - val_mae: 0.1089
Epoch 21/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0498 - mae: 0.1795
4/4 [==============================] - 0s 3ms/step - loss: 0.0457 - mae: 0.1750

4/4 [==============================] - 0s 51ms/step - loss: 0.0457 - mae: 0.1750 - val_loss: 0.0182 - val_mae: 0.1075
Epoch 22/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0457 - mae: 0.1682
4/4 [==============================] - 0s 3ms/step - loss: 0.0477 - mae: 0.1722

4/4 [==============================] - 0s 57ms/step - loss: 0.0477 - mae: 0.1722 - val_loss: 0.0176 - val_mae: 0.1055
Epoch 23/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0551 - mae: 0.1911
4/4 [==============================] - 0s 3ms/step - loss: 0.0556 - mae: 0.1904

4/4 [==============================] - 0s 51ms/step - loss: 0.0556 - mae: 0.1904 - val_loss: 0.0169 - val_mae: 0.1031
Epoch 24/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0567 - mae: 0.1787
4/4 [==============================] - 0s 3ms/step - loss: 0.0447 - mae: 0.1686

4/4 [==============================] - 0s 56ms/step - loss: 0.0447 - mae: 0.1686 - val_loss: 0.0163 - val_mae: 0.1004
Epoch 25/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0385 - mae: 0.1561
4/4 [==============================] - 0s 3ms/step - loss: 0.0416 - mae: 0.1624

4/4 [==============================] - 0s 51ms/step - loss: 0.0416 - mae: 0.1624 - val_loss: 0.0159 - val_mae: 0.0988
Epoch 26/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0502 - mae: 0.1794
4/4 [==============================] - 0s 4ms/step - loss: 0.0460 - mae: 0.1715

4/4 [==============================] - 0s 51ms/step - loss: 0.0460 - mae: 0.1715 - val_loss: 0.0155 - val_mae: 0.0971
Epoch 27/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0499 - mae: 0.1806
4/4 [==============================] - 0s 3ms/step - loss: 0.0423 - mae: 0.1631

4/4 [==============================] - 0s 51ms/step - loss: 0.0423 - mae: 0.1631 - val_loss: 0.0153 - val_mae: 0.0961
Epoch 28/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0356 - mae: 0.1478
4/4 [==============================] - 0s 3ms/step - loss: 0.0393 - mae: 0.1572

4/4 [==============================] - 0s 56ms/step - loss: 0.0393 - mae: 0.1572 - val_loss: 0.0150 - val_mae: 0.0951
Epoch 29/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0379 - mae: 0.1561
4/4 [==============================] - 0s 3ms/step - loss: 0.0426 - mae: 0.1646

4/4 [==============================] - 0s 51ms/step - loss: 0.0426 - mae: 0.1646 - val_loss: 0.0146 - val_mae: 0.0940
Epoch 30/30

1/4 [======>.......................] - ETA: 0s - loss: 0.0378 - mae: 0.1537
4/4 [==============================] - 0s 4ms/step - loss: 0.0376 - mae: 0.1534

4/4 [==============================] - 0s 55ms/step - loss: 0.0376 - mae: 0.1534 - val_loss: 0.0143 - val_mae: 0.0927
model
Model: "sequential"
__________________________________________________________________________________________
 Layer (type)                           Output Shape                        Param #       
==========================================================================================
 dense_2 (Dense)                        (None, 128)                         1152          
 dropout_1 (Dropout)                    (None, 128)                         0             
 dense_1 (Dense)                        (None, 128)                         16512         
 dropout (Dropout)                      (None, 128)                         0             
 dense (Dense)                          (None, 1)                           129           
==========================================================================================
Total params: 17,793
Trainable params: 17,793
Non-trainable params: 0
__________________________________________________________________________________________
set.seed(1)
# Final loss (MSE) and MAE of the fitted network on the full training set.
model %>% evaluate(as.matrix(new_train_trim), grad_train1y) 

 1/12 [=>............................] - ETA: 0s - loss: 0.0123 - mae: 0.0952
12/12 [==============================] - 0s 1ms/step - loss: 0.0137 - mae: 0.0956

12/12 [==============================] - 0s 1ms/step - loss: 0.0137 - mae: 0.0956
      loss        mae 
0.01368135 0.09561045 
set.seed(1)
library(tfruns)
# Random hyperparameter search: enumerate every combination of the flags
# below (2,592 total) for the external training script and run a 2% random
# sample of them (~52 runs), each logged to its own run directory.
# NOTE(review): with `sample`, the drawn subset presumably depends on the
# flag-list order and RNG state — confirm before reordering these entries.
runs <- tuning_run("~/ProjectScript.R", 
                   flags = list( 
                     nodes = c(16, 32, 64),
                     nodes2 = c(50, 32, 10),
                     learning_rate = c(0.01,  0.001),
                     batch_size = c(30, 50),
                     epochs = c(30, 50),
                     activation = c("relu", "sigmoid", "tanh"),
                     activation2 = c("relu", "sigmoid", "tanh"),
                     dropout = c(0.2, 0.6),
                     dropout2 = c(0.2, 0.6)
                   ), sample = 0.02
                   )
2,592 total combinations of flags 
(sampled to 52 combinations)

Training run 1/52 (flags = list(64, 10, 0.01, 30, 30, "tanh", "sigmoid", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-27-21Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 5s - loss: 0.2002 - mae: 0.3659
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0015s vs `on_train_batch_end` time: 0.0026s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.1612 - mae: 0.3284

13/13 [==============================] - 1s 66ms/step - loss: 0.1612 - mae: 0.3284 - val_loss: 0.0686 - val_mae: 0.2238
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0818 - mae: 0.2209
13/13 [==============================] - 0s 3ms/step - loss: 0.1237 - mae: 0.2844

13/13 [==============================] - 0s 38ms/step - loss: 0.1237 - mae: 0.2844 - val_loss: 0.0549 - val_mae: 0.1996
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0771 - mae: 0.2139
13/13 [==============================] - 0s 2ms/step - loss: 0.1217 - mae: 0.2817

13/13 [==============================] - 0s 16ms/step - loss: 0.1217 - mae: 0.2817 - val_loss: 0.0448 - val_mae: 0.1795
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0924 - mae: 0.2224
13/13 [==============================] - 0s 2ms/step - loss: 0.1108 - mae: 0.2631

13/13 [==============================] - 0s 16ms/step - loss: 0.1108 - mae: 0.2631 - val_loss: 0.0357 - val_mae: 0.1584
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0988 - mae: 0.2713
13/13 [==============================] - 0s 2ms/step - loss: 0.0931 - mae: 0.2465

13/13 [==============================] - 0s 18ms/step - loss: 0.0931 - mae: 0.2465 - val_loss: 0.0288 - val_mae: 0.1414
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0885 - mae: 0.2475
13/13 [==============================] - 0s 2ms/step - loss: 0.0974 - mae: 0.2524

13/13 [==============================] - 0s 17ms/step - loss: 0.0974 - mae: 0.2524 - val_loss: 0.0238 - val_mae: 0.1270
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0700 - mae: 0.2160
13/13 [==============================] - 0s 2ms/step - loss: 0.0849 - mae: 0.2358

13/13 [==============================] - 0s 17ms/step - loss: 0.0849 - mae: 0.2358 - val_loss: 0.0202 - val_mae: 0.1167
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0849 - mae: 0.2316
13/13 [==============================] - 0s 3ms/step - loss: 0.0736 - mae: 0.2199

13/13 [==============================] - 0s 18ms/step - loss: 0.0736 - mae: 0.2199 - val_loss: 0.0176 - val_mae: 0.1091
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0703 - mae: 0.2129
13/13 [==============================] - 0s 2ms/step - loss: 0.0666 - mae: 0.2080

13/13 [==============================] - 0s 17ms/step - loss: 0.0666 - mae: 0.2080 - val_loss: 0.0153 - val_mae: 0.1016
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0651 - mae: 0.2063
13/13 [==============================] - 0s 2ms/step - loss: 0.0685 - mae: 0.2078

13/13 [==============================] - 0s 16ms/step - loss: 0.0685 - mae: 0.2078 - val_loss: 0.0137 - val_mae: 0.0963
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0647 - mae: 0.2045
13/13 [==============================] - 0s 35ms/step - loss: 0.0731 - mae: 0.2164

13/13 [==============================] - 1s 50ms/step - loss: 0.0731 - mae: 0.2164 - val_loss: 0.0119 - val_mae: 0.0897
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0516 - mae: 0.1886
13/13 [==============================] - 0s 2ms/step - loss: 0.0563 - mae: 0.1868

13/13 [==============================] - 0s 16ms/step - loss: 0.0563 - mae: 0.1868 - val_loss: 0.0104 - val_mae: 0.0846
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0868 - mae: 0.2388
13/13 [==============================] - 0s 2ms/step - loss: 0.0621 - mae: 0.2018

13/13 [==============================] - 0s 16ms/step - loss: 0.0621 - mae: 0.2018 - val_loss: 0.0097 - val_mae: 0.0815
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0402 - mae: 0.1510
13/13 [==============================] - 0s 2ms/step - loss: 0.0580 - mae: 0.1883

13/13 [==============================] - 0s 17ms/step - loss: 0.0580 - mae: 0.1883 - val_loss: 0.0091 - val_mae: 0.0787
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0542 - mae: 0.1961
13/13 [==============================] - 0s 2ms/step - loss: 0.0528 - mae: 0.1836

13/13 [==============================] - 0s 16ms/step - loss: 0.0528 - mae: 0.1836 - val_loss: 0.0082 - val_mae: 0.0746
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0670 - mae: 0.2137
13/13 [==============================] - 0s 2ms/step - loss: 0.0519 - mae: 0.1810

13/13 [==============================] - 0s 17ms/step - loss: 0.0519 - mae: 0.1810 - val_loss: 0.0076 - val_mae: 0.0715
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0612 - mae: 0.2037
13/13 [==============================] - 0s 2ms/step - loss: 0.0516 - mae: 0.1802

13/13 [==============================] - 0s 16ms/step - loss: 0.0516 - mae: 0.1802 - val_loss: 0.0072 - val_mae: 0.0696
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0559 - mae: 0.1858
13/13 [==============================] - 0s 2ms/step - loss: 0.0525 - mae: 0.1840

13/13 [==============================] - 0s 16ms/step - loss: 0.0525 - mae: 0.1840 - val_loss: 0.0069 - val_mae: 0.0678
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0318 - mae: 0.1487
13/13 [==============================] - 0s 3ms/step - loss: 0.0465 - mae: 0.1732

13/13 [==============================] - 0s 17ms/step - loss: 0.0465 - mae: 0.1732 - val_loss: 0.0065 - val_mae: 0.0657
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0560 - mae: 0.2021
13/13 [==============================] - 0s 2ms/step - loss: 0.0453 - mae: 0.1676

13/13 [==============================] - 0s 16ms/step - loss: 0.0453 - mae: 0.1676 - val_loss: 0.0062 - val_mae: 0.0636
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0409 - mae: 0.1642
13/13 [==============================] - 0s 2ms/step - loss: 0.0464 - mae: 0.1680

13/13 [==============================] - 0s 16ms/step - loss: 0.0464 - mae: 0.1680 - val_loss: 0.0061 - val_mae: 0.0627
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0634 - mae: 0.1944
13/13 [==============================] - 0s 2ms/step - loss: 0.0504 - mae: 0.1791

13/13 [==============================] - 0s 17ms/step - loss: 0.0504 - mae: 0.1791 - val_loss: 0.0057 - val_mae: 0.0600
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0475 - mae: 0.1783
13/13 [==============================] - 0s 2ms/step - loss: 0.0437 - mae: 0.1652

13/13 [==============================] - 0s 17ms/step - loss: 0.0437 - mae: 0.1652 - val_loss: 0.0058 - val_mae: 0.0604
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0523 - mae: 0.1743
13/13 [==============================] - 0s 3ms/step - loss: 0.0468 - mae: 0.1682

13/13 [==============================] - 0s 18ms/step - loss: 0.0468 - mae: 0.1682 - val_loss: 0.0058 - val_mae: 0.0605
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0405 - mae: 0.1653
13/13 [==============================] - 0s 2ms/step - loss: 0.0428 - mae: 0.1636

13/13 [==============================] - 0s 17ms/step - loss: 0.0428 - mae: 0.1636 - val_loss: 0.0059 - val_mae: 0.0609
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0307 - mae: 0.1520
13/13 [==============================] - 0s 2ms/step - loss: 0.0457 - mae: 0.1710

13/13 [==============================] - 0s 16ms/step - loss: 0.0457 - mae: 0.1710 - val_loss: 0.0050 - val_mae: 0.0560
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0311 - mae: 0.1405
13/13 [==============================] - 0s 2ms/step - loss: 0.0382 - mae: 0.1567

13/13 [==============================] - 0s 16ms/step - loss: 0.0382 - mae: 0.1567 - val_loss: 0.0050 - val_mae: 0.0567
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0453 - mae: 0.1767
13/13 [==============================] - 0s 2ms/step - loss: 0.0413 - mae: 0.1638

13/13 [==============================] - 0s 18ms/step - loss: 0.0413 - mae: 0.1638 - val_loss: 0.0050 - val_mae: 0.0550
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0440 - mae: 0.1719
13/13 [==============================] - 0s 3ms/step - loss: 0.0341 - mae: 0.1424

13/13 [==============================] - 0s 17ms/step - loss: 0.0341 - mae: 0.1424 - val_loss: 0.0048 - val_mae: 0.0552
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0329 - mae: 0.1339
13/13 [==============================] - 0s 2ms/step - loss: 0.0370 - mae: 0.1530

13/13 [==============================] - 0s 16ms/step - loss: 0.0370 - mae: 0.1530 - val_loss: 0.0047 - val_mae: 0.0538

Run completed: runs/2022-12-07T02-27-21Z

Training run 2/52 (flags = list(16, 32, 0.001, 50, 30, "relu", "relu", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-29-17Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 2s - loss: 0.6333 - mae: 0.6845
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0013s vs `on_train_batch_end` time: 0.0022s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.8957 - mae: 0.7813

8/8 [==============================] - 1s 104ms/step - loss: 0.8957 - mae: 0.7813 - val_loss: 0.4076 - val_mae: 0.5581
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 1.0125 - mae: 0.8134
8/8 [==============================] - 0s 2ms/step - loss: 0.9061 - mae: 0.7561

8/8 [==============================] - 0s 26ms/step - loss: 0.9061 - mae: 0.7561 - val_loss: 0.3285 - val_mae: 0.4961
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.9857 - mae: 0.7922
8/8 [==============================] - 0s 2ms/step - loss: 0.7318 - mae: 0.6933

8/8 [==============================] - 0s 28ms/step - loss: 0.7318 - mae: 0.6933 - val_loss: 0.2744 - val_mae: 0.4455
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6649 - mae: 0.6421
8/8 [==============================] - 0s 3ms/step - loss: 0.6732 - mae: 0.6634

8/8 [==============================] - 0s 32ms/step - loss: 0.6732 - mae: 0.6634 - val_loss: 0.2351 - val_mae: 0.4048
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6118 - mae: 0.6391
8/8 [==============================] - 0s 3ms/step - loss: 0.5235 - mae: 0.5893

8/8 [==============================] - 0s 29ms/step - loss: 0.5235 - mae: 0.5893 - val_loss: 0.2184 - val_mae: 0.3879
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4320 - mae: 0.5626
8/8 [==============================] - 0s 3ms/step - loss: 0.5640 - mae: 0.6055

8/8 [==============================] - 0s 28ms/step - loss: 0.5640 - mae: 0.6055 - val_loss: 0.1964 - val_mae: 0.3639
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5482 - mae: 0.6162
8/8 [==============================] - 0s 2ms/step - loss: 0.5733 - mae: 0.6239

8/8 [==============================] - 0s 26ms/step - loss: 0.5733 - mae: 0.6239 - val_loss: 0.1761 - val_mae: 0.3415
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5098 - mae: 0.5894
8/8 [==============================] - 0s 3ms/step - loss: 0.4515 - mae: 0.5409

8/8 [==============================] - 0s 31ms/step - loss: 0.4515 - mae: 0.5409 - val_loss: 0.1611 - val_mae: 0.3244
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3901 - mae: 0.4897
8/8 [==============================] - 0s 3ms/step - loss: 0.5074 - mae: 0.5705

8/8 [==============================] - 0s 32ms/step - loss: 0.5074 - mae: 0.5705 - val_loss: 0.1453 - val_mae: 0.3077
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4915 - mae: 0.5901
8/8 [==============================] - 0s 3ms/step - loss: 0.4640 - mae: 0.5622

8/8 [==============================] - 0s 28ms/step - loss: 0.4640 - mae: 0.5622 - val_loss: 0.1368 - val_mae: 0.2990
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4925 - mae: 0.6012
8/8 [==============================] - 0s 2ms/step - loss: 0.4652 - mae: 0.5596

8/8 [==============================] - 0s 29ms/step - loss: 0.4652 - mae: 0.5596 - val_loss: 0.1264 - val_mae: 0.2878
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4880 - mae: 0.5965
8/8 [==============================] - 0s 2ms/step - loss: 0.3561 - mae: 0.4854

8/8 [==============================] - 0s 27ms/step - loss: 0.3561 - mae: 0.4854 - val_loss: 0.1175 - val_mae: 0.2771
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4935 - mae: 0.5952
8/8 [==============================] - 0s 3ms/step - loss: 0.3812 - mae: 0.5181

8/8 [==============================] - 0s 26ms/step - loss: 0.3812 - mae: 0.5181 - val_loss: 0.1116 - val_mae: 0.2701
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5102 - mae: 0.5474
8/8 [==============================] - 0s 2ms/step - loss: 0.3944 - mae: 0.5002

8/8 [==============================] - 0s 28ms/step - loss: 0.3944 - mae: 0.5002 - val_loss: 0.1054 - val_mae: 0.2629
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3117 - mae: 0.4613
8/8 [==============================] - 0s 2ms/step - loss: 0.3694 - mae: 0.5113

8/8 [==============================] - 0s 27ms/step - loss: 0.3694 - mae: 0.5113 - val_loss: 0.0993 - val_mae: 0.2558
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4874 - mae: 0.5654
8/8 [==============================] - 0s 3ms/step - loss: 0.3610 - mae: 0.4824

8/8 [==============================] - 0s 27ms/step - loss: 0.3610 - mae: 0.4824 - val_loss: 0.0956 - val_mae: 0.2516
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4409 - mae: 0.5512
8/8 [==============================] - 0s 3ms/step - loss: 0.3513 - mae: 0.4819

8/8 [==============================] - 0s 26ms/step - loss: 0.3513 - mae: 0.4819 - val_loss: 0.0899 - val_mae: 0.2456
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3511 - mae: 0.4789
8/8 [==============================] - 0s 2ms/step - loss: 0.3425 - mae: 0.4774

8/8 [==============================] - 0s 27ms/step - loss: 0.3425 - mae: 0.4774 - val_loss: 0.0872 - val_mae: 0.2420
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3313 - mae: 0.4691
8/8 [==============================] - 0s 3ms/step - loss: 0.3691 - mae: 0.4887

8/8 [==============================] - 0s 28ms/step - loss: 0.3691 - mae: 0.4887 - val_loss: 0.0833 - val_mae: 0.2377
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3204 - mae: 0.4403
8/8 [==============================] - 0s 3ms/step - loss: 0.3724 - mae: 0.4952

8/8 [==============================] - 0s 28ms/step - loss: 0.3724 - mae: 0.4952 - val_loss: 0.0813 - val_mae: 0.2350
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3110 - mae: 0.4707
8/8 [==============================] - 0s 2ms/step - loss: 0.3143 - mae: 0.4527

8/8 [==============================] - 0s 26ms/step - loss: 0.3143 - mae: 0.4527 - val_loss: 0.0767 - val_mae: 0.2301
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4138 - mae: 0.5108
8/8 [==============================] - 0s 2ms/step - loss: 0.3399 - mae: 0.4680

8/8 [==============================] - 0s 27ms/step - loss: 0.3399 - mae: 0.4680 - val_loss: 0.0748 - val_mae: 0.2278
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3009 - mae: 0.4794
8/8 [==============================] - 0s 2ms/step - loss: 0.3431 - mae: 0.4704

8/8 [==============================] - 0s 27ms/step - loss: 0.3431 - mae: 0.4704 - val_loss: 0.0735 - val_mae: 0.2258
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2551 - mae: 0.3978
8/8 [==============================] - 0s 3ms/step - loss: 0.2457 - mae: 0.4079

8/8 [==============================] - 0s 28ms/step - loss: 0.2457 - mae: 0.4079 - val_loss: 0.0707 - val_mae: 0.2227
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3698 - mae: 0.5225
8/8 [==============================] - 0s 3ms/step - loss: 0.2821 - mae: 0.4398

8/8 [==============================] - 0s 28ms/step - loss: 0.2821 - mae: 0.4398 - val_loss: 0.0687 - val_mae: 0.2202
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2772 - mae: 0.4283
8/8 [==============================] - 0s 2ms/step - loss: 0.3282 - mae: 0.4556

8/8 [==============================] - 0s 28ms/step - loss: 0.3282 - mae: 0.4556 - val_loss: 0.0663 - val_mae: 0.2172
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3722 - mae: 0.5183
8/8 [==============================] - 0s 2ms/step - loss: 0.3432 - mae: 0.4781

8/8 [==============================] - 0s 26ms/step - loss: 0.3432 - mae: 0.4781 - val_loss: 0.0659 - val_mae: 0.2158
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2195 - mae: 0.3741
8/8 [==============================] - 0s 2ms/step - loss: 0.3221 - mae: 0.4513

8/8 [==============================] - 0s 26ms/step - loss: 0.3221 - mae: 0.4513 - val_loss: 0.0641 - val_mae: 0.2134
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3450 - mae: 0.4789
8/8 [==============================] - 0s 3ms/step - loss: 0.2946 - mae: 0.4337

8/8 [==============================] - 0s 29ms/step - loss: 0.2946 - mae: 0.4337 - val_loss: 0.0625 - val_mae: 0.2110
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2988 - mae: 0.4332
8/8 [==============================] - 0s 2ms/step - loss: 0.2652 - mae: 0.4162

8/8 [==============================] - 0s 28ms/step - loss: 0.2652 - mae: 0.4162 - val_loss: 0.0620 - val_mae: 0.2099

Run completed: runs/2022-12-07T02-29-17Z

Training run 3/52 (flags = list(32, 10, 0.001, 30, 30, "relu", "sigmoid", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-31-03Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 5s - loss: 2.4736 - mae: 1.4398
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0012s vs `on_train_batch_end` time: 0.0023s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 1.7136 - mae: 1.1390

13/13 [==============================] - 1s 64ms/step - loss: 1.7136 - mae: 1.1390 - val_loss: 1.1421 - val_mae: 1.0561
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 1.9441 - mae: 1.2425
13/13 [==============================] - 0s 3ms/step - loss: 1.5438 - mae: 1.0686

13/13 [==============================] - 0s 19ms/step - loss: 1.5438 - mae: 1.0686 - val_loss: 0.9483 - val_mae: 0.9604
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 1.7014 - mae: 1.1425
13/13 [==============================] - 0s 2ms/step - loss: 1.3060 - mae: 0.9680

13/13 [==============================] - 0s 19ms/step - loss: 1.3060 - mae: 0.9680 - val_loss: 0.7818 - val_mae: 0.8697
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 1.3425 - mae: 1.0149
13/13 [==============================] - 0s 2ms/step - loss: 1.1436 - mae: 0.9018

13/13 [==============================] - 0s 17ms/step - loss: 1.1436 - mae: 0.9018 - val_loss: 0.6503 - val_mae: 0.7909
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 1.0487 - mae: 0.8668
13/13 [==============================] - 0s 2ms/step - loss: 1.0068 - mae: 0.8418

13/13 [==============================] - 0s 17ms/step - loss: 1.0068 - mae: 0.8418 - val_loss: 0.5316 - val_mae: 0.7124
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.8426 - mae: 0.8119
13/13 [==============================] - 0s 3ms/step - loss: 0.8787 - mae: 0.7958

13/13 [==============================] - 0s 19ms/step - loss: 0.8787 - mae: 0.7958 - val_loss: 0.4250 - val_mae: 0.6336
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7934 - mae: 0.7550
13/13 [==============================] - 0s 3ms/step - loss: 0.7511 - mae: 0.7194

13/13 [==============================] - 0s 20ms/step - loss: 0.7511 - mae: 0.7194 - val_loss: 0.3588 - val_mae: 0.5792
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 1.1189 - mae: 0.8640
13/13 [==============================] - 0s 2ms/step - loss: 0.7404 - mae: 0.7205

13/13 [==============================] - 0s 17ms/step - loss: 0.7404 - mae: 0.7205 - val_loss: 0.2983 - val_mae: 0.5249
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6977 - mae: 0.6550
13/13 [==============================] - 0s 2ms/step - loss: 0.6361 - mae: 0.6495

13/13 [==============================] - 0s 17ms/step - loss: 0.6361 - mae: 0.6495 - val_loss: 0.2513 - val_mae: 0.4785
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5844 - mae: 0.6491
13/13 [==============================] - 0s 2ms/step - loss: 0.5158 - mae: 0.5782

13/13 [==============================] - 0s 17ms/step - loss: 0.5158 - mae: 0.5782 - val_loss: 0.2141 - val_mae: 0.4390
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6303 - mae: 0.6753
13/13 [==============================] - 0s 3ms/step - loss: 0.5612 - mae: 0.6193

13/13 [==============================] - 0s 19ms/step - loss: 0.5612 - mae: 0.6193 - val_loss: 0.1717 - val_mae: 0.3904
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3248 - mae: 0.4883
13/13 [==============================] - 0s 3ms/step - loss: 0.4463 - mae: 0.5406

13/13 [==============================] - 0s 18ms/step - loss: 0.4463 - mae: 0.5406 - val_loss: 0.1503 - val_mae: 0.3634
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4154 - mae: 0.5318
13/13 [==============================] - 0s 3ms/step - loss: 0.4557 - mae: 0.5457

13/13 [==============================] - 0s 18ms/step - loss: 0.4557 - mae: 0.5457 - val_loss: 0.1242 - val_mae: 0.3271
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4563 - mae: 0.5454
13/13 [==============================] - 0s 2ms/step - loss: 0.4218 - mae: 0.5359

13/13 [==============================] - 0s 18ms/step - loss: 0.4218 - mae: 0.5359 - val_loss: 0.1067 - val_mae: 0.3002
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2613 - mae: 0.3834
13/13 [==============================] - 0s 2ms/step - loss: 0.3403 - mae: 0.4707

13/13 [==============================] - 0s 17ms/step - loss: 0.3403 - mae: 0.4707 - val_loss: 0.0954 - val_mae: 0.2815
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3040 - mae: 0.4344
13/13 [==============================] - 0s 2ms/step - loss: 0.3840 - mae: 0.5032

13/13 [==============================] - 0s 17ms/step - loss: 0.3840 - mae: 0.5032 - val_loss: 0.0843 - val_mae: 0.2628
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5716 - mae: 0.6043
13/13 [==============================] - 0s 2ms/step - loss: 0.3434 - mae: 0.4679

13/13 [==============================] - 0s 17ms/step - loss: 0.3434 - mae: 0.4679 - val_loss: 0.0736 - val_mae: 0.2436
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2786 - mae: 0.4067
13/13 [==============================] - 0s 3ms/step - loss: 0.3829 - mae: 0.4985

13/13 [==============================] - 0s 17ms/step - loss: 0.3829 - mae: 0.4985 - val_loss: 0.0632 - val_mae: 0.2245
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4131 - mae: 0.5122
13/13 [==============================] - 0s 3ms/step - loss: 0.3306 - mae: 0.4633

13/13 [==============================] - 0s 17ms/step - loss: 0.3306 - mae: 0.4633 - val_loss: 0.0554 - val_mae: 0.2089
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2587 - mae: 0.3863
13/13 [==============================] - 0s 2ms/step - loss: 0.3045 - mae: 0.4583

13/13 [==============================] - 0s 17ms/step - loss: 0.3045 - mae: 0.4583 - val_loss: 0.0529 - val_mae: 0.2037
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3523 - mae: 0.4547
13/13 [==============================] - 0s 3ms/step - loss: 0.3301 - mae: 0.4671

13/13 [==============================] - 0s 17ms/step - loss: 0.3301 - mae: 0.4671 - val_loss: 0.0467 - val_mae: 0.1897
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2028 - mae: 0.3637
13/13 [==============================] - 0s 2ms/step - loss: 0.3120 - mae: 0.4542

13/13 [==============================] - 0s 17ms/step - loss: 0.3120 - mae: 0.4542 - val_loss: 0.0411 - val_mae: 0.1758
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2981 - mae: 0.4131
13/13 [==============================] - 0s 2ms/step - loss: 0.2997 - mae: 0.4497

13/13 [==============================] - 0s 18ms/step - loss: 0.2997 - mae: 0.4497 - val_loss: 0.0375 - val_mae: 0.1657
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3581 - mae: 0.4892
13/13 [==============================] - 0s 2ms/step - loss: 0.2765 - mae: 0.4212

13/13 [==============================] - 0s 17ms/step - loss: 0.2765 - mae: 0.4212 - val_loss: 0.0347 - val_mae: 0.1576
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2604 - mae: 0.3959
13/13 [==============================] - 0s 2ms/step - loss: 0.2657 - mae: 0.4099

13/13 [==============================] - 0s 17ms/step - loss: 0.2657 - mae: 0.4099 - val_loss: 0.0318 - val_mae: 0.1482
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2909 - mae: 0.4372
13/13 [==============================] - 0s 2ms/step - loss: 0.2692 - mae: 0.4221

13/13 [==============================] - 0s 17ms/step - loss: 0.2692 - mae: 0.4221 - val_loss: 0.0293 - val_mae: 0.1397
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2666 - mae: 0.4394
13/13 [==============================] - 0s 2ms/step - loss: 0.2757 - mae: 0.4339

13/13 [==============================] - 0s 17ms/step - loss: 0.2757 - mae: 0.4339 - val_loss: 0.0289 - val_mae: 0.1386
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2013 - mae: 0.3832
13/13 [==============================] - 0s 3ms/step - loss: 0.2854 - mae: 0.4348

13/13 [==============================] - 0s 18ms/step - loss: 0.2854 - mae: 0.4348 - val_loss: 0.0266 - val_mae: 0.1304
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1837 - mae: 0.3488
13/13 [==============================] - 0s 2ms/step - loss: 0.2698 - mae: 0.4217

13/13 [==============================] - 0s 17ms/step - loss: 0.2698 - mae: 0.4217 - val_loss: 0.0253 - val_mae: 0.1258
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2270 - mae: 0.4051
13/13 [==============================] - 0s 2ms/step - loss: 0.2329 - mae: 0.3859

13/13 [==============================] - 0s 17ms/step - loss: 0.2329 - mae: 0.3859 - val_loss: 0.0253 - val_mae: 0.1259

Run completed: runs/2022-12-07T02-31-03Z

Training run 4/52 (flags = list(64, 50, 0.001, 50, 50, "relu", "sigmoid", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-32-08Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 2s - loss: 4.4047 - mae: 2.0671
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0027s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 3.6480 - mae: 1.8718

8/8 [==============================] - 1s 103ms/step - loss: 3.6480 - mae: 1.8718 - val_loss: 2.6103 - val_mae: 1.6119
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 2.7918 - mae: 1.6139
8/8 [==============================] - 0s 2ms/step - loss: 2.2554 - mae: 1.4477

8/8 [==============================] - 0s 26ms/step - loss: 2.2554 - mae: 1.4477 - val_loss: 1.6989 - val_mae: 1.2988
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 1.6686 - mae: 1.2298
8/8 [==============================] - 0s 2ms/step - loss: 1.4875 - mae: 1.1558

8/8 [==============================] - 0s 26ms/step - loss: 1.4875 - mae: 1.1558 - val_loss: 1.1100 - val_mae: 1.0479
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 1.3036 - mae: 1.0758
8/8 [==============================] - 0s 3ms/step - loss: 1.0992 - mae: 0.9809

8/8 [==============================] - 0s 31ms/step - loss: 1.0992 - mae: 0.9809 - val_loss: 0.7114 - val_mae: 0.8363
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9172 - mae: 0.8732
8/8 [==============================] - 0s 4ms/step - loss: 0.7639 - mae: 0.7909

8/8 [==============================] - 0s 35ms/step - loss: 0.7639 - mae: 0.7909 - val_loss: 0.4574 - val_mae: 0.6674
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6270 - mae: 0.7108
8/8 [==============================] - 0s 3ms/step - loss: 0.5075 - mae: 0.6166

8/8 [==============================] - 0s 29ms/step - loss: 0.5075 - mae: 0.6166 - val_loss: 0.3087 - val_mae: 0.5448
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4110 - mae: 0.5566
8/8 [==============================] - 0s 2ms/step - loss: 0.3851 - mae: 0.5321

8/8 [==============================] - 0s 27ms/step - loss: 0.3851 - mae: 0.5321 - val_loss: 0.2044 - val_mae: 0.4388
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3776 - mae: 0.5174
8/8 [==============================] - 0s 2ms/step - loss: 0.2897 - mae: 0.4496

8/8 [==============================] - 0s 27ms/step - loss: 0.2897 - mae: 0.4496 - val_loss: 0.1349 - val_mae: 0.3509
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2327 - mae: 0.3986
8/8 [==============================] - 0s 3ms/step - loss: 0.2654 - mae: 0.4132

8/8 [==============================] - 0s 27ms/step - loss: 0.2654 - mae: 0.4132 - val_loss: 0.0900 - val_mae: 0.2831
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2913 - mae: 0.4581
8/8 [==============================] - 0s 2ms/step - loss: 0.2309 - mae: 0.3849

8/8 [==============================] - 0s 26ms/step - loss: 0.2309 - mae: 0.3849 - val_loss: 0.0574 - val_mae: 0.2205
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1478 - mae: 0.3025
8/8 [==============================] - 0s 3ms/step - loss: 0.1799 - mae: 0.3468

8/8 [==============================] - 0s 29ms/step - loss: 0.1799 - mae: 0.3468 - val_loss: 0.0391 - val_mae: 0.1779
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1471 - mae: 0.3021
8/8 [==============================] - 0s 2ms/step - loss: 0.1473 - mae: 0.3095

8/8 [==============================] - 0s 29ms/step - loss: 0.1473 - mae: 0.3095 - val_loss: 0.0295 - val_mae: 0.1507
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2057 - mae: 0.3742
8/8 [==============================] - 0s 3ms/step - loss: 0.1670 - mae: 0.3288

8/8 [==============================] - 0s 35ms/step - loss: 0.1670 - mae: 0.3288 - val_loss: 0.0228 - val_mae: 0.1268
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1024 - mae: 0.2442
8/8 [==============================] - 0s 2ms/step - loss: 0.1680 - mae: 0.3254

8/8 [==============================] - 0s 29ms/step - loss: 0.1680 - mae: 0.3254 - val_loss: 0.0192 - val_mae: 0.1120
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1302 - mae: 0.3032
8/8 [==============================] - 0s 2ms/step - loss: 0.1563 - mae: 0.3093

8/8 [==============================] - 0s 27ms/step - loss: 0.1563 - mae: 0.3093 - val_loss: 0.0164 - val_mae: 0.0997
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1656 - mae: 0.3185
8/8 [==============================] - 0s 3ms/step - loss: 0.1544 - mae: 0.3152

8/8 [==============================] - 0s 29ms/step - loss: 0.1544 - mae: 0.3152 - val_loss: 0.0151 - val_mae: 0.0941
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1377 - mae: 0.2960
8/8 [==============================] - 0s 3ms/step - loss: 0.1285 - mae: 0.2864

8/8 [==============================] - 0s 26ms/step - loss: 0.1285 - mae: 0.2864 - val_loss: 0.0141 - val_mae: 0.0897
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1746 - mae: 0.3113
8/8 [==============================] - 0s 3ms/step - loss: 0.1346 - mae: 0.2873

8/8 [==============================] - 0s 27ms/step - loss: 0.1346 - mae: 0.2873 - val_loss: 0.0131 - val_mae: 0.0849
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1015 - mae: 0.2560
8/8 [==============================] - 0s 3ms/step - loss: 0.1373 - mae: 0.2982

8/8 [==============================] - 0s 27ms/step - loss: 0.1373 - mae: 0.2982 - val_loss: 0.0122 - val_mae: 0.0807
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1791 - mae: 0.3213
8/8 [==============================] - 0s 2ms/step - loss: 0.1621 - mae: 0.3203

8/8 [==============================] - 0s 26ms/step - loss: 0.1621 - mae: 0.3203 - val_loss: 0.0119 - val_mae: 0.0794
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1618 - mae: 0.3313
8/8 [==============================] - 0s 2ms/step - loss: 0.1535 - mae: 0.3175

8/8 [==============================] - 0s 26ms/step - loss: 0.1535 - mae: 0.3175 - val_loss: 0.0119 - val_mae: 0.0793
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1717 - mae: 0.3284
8/8 [==============================] - 0s 2ms/step - loss: 0.1379 - mae: 0.2936

8/8 [==============================] - 0s 27ms/step - loss: 0.1379 - mae: 0.2936 - val_loss: 0.0118 - val_mae: 0.0789
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1273 - mae: 0.2673
8/8 [==============================] - 0s 3ms/step - loss: 0.1468 - mae: 0.3047

8/8 [==============================] - 0s 26ms/step - loss: 0.1468 - mae: 0.3047 - val_loss: 0.0116 - val_mae: 0.0786
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1320 - mae: 0.2827
8/8 [==============================] - 0s 3ms/step - loss: 0.1556 - mae: 0.3052

8/8 [==============================] - 0s 27ms/step - loss: 0.1556 - mae: 0.3052 - val_loss: 0.0114 - val_mae: 0.0776
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2138 - mae: 0.3938
8/8 [==============================] - 0s 3ms/step - loss: 0.1364 - mae: 0.3010

8/8 [==============================] - 0s 27ms/step - loss: 0.1364 - mae: 0.3010 - val_loss: 0.0113 - val_mae: 0.0774
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1561 - mae: 0.2952
8/8 [==============================] - 0s 3ms/step - loss: 0.1227 - mae: 0.2846

8/8 [==============================] - 0s 28ms/step - loss: 0.1227 - mae: 0.2846 - val_loss: 0.0114 - val_mae: 0.0777
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1513 - mae: 0.3190
8/8 [==============================] - 0s 3ms/step - loss: 0.1193 - mae: 0.2750

8/8 [==============================] - 0s 26ms/step - loss: 0.1193 - mae: 0.2750 - val_loss: 0.0113 - val_mae: 0.0775
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1435 - mae: 0.3090
8/8 [==============================] - 0s 3ms/step - loss: 0.1424 - mae: 0.3035

8/8 [==============================] - 0s 29ms/step - loss: 0.1424 - mae: 0.3035 - val_loss: 0.0110 - val_mae: 0.0763
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1146 - mae: 0.2780
8/8 [==============================] - 0s 3ms/step - loss: 0.1346 - mae: 0.2900

8/8 [==============================] - 0s 27ms/step - loss: 0.1346 - mae: 0.2900 - val_loss: 0.0109 - val_mae: 0.0759
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1444 - mae: 0.2732
8/8 [==============================] - 0s 3ms/step - loss: 0.1445 - mae: 0.2948

8/8 [==============================] - 0s 28ms/step - loss: 0.1445 - mae: 0.2948 - val_loss: 0.0108 - val_mae: 0.0758
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1175 - mae: 0.2741
8/8 [==============================] - 0s 3ms/step - loss: 0.1376 - mae: 0.2958

8/8 [==============================] - 0s 27ms/step - loss: 0.1376 - mae: 0.2958 - val_loss: 0.0108 - val_mae: 0.0757
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1799 - mae: 0.3230
8/8 [==============================] - 0s 2ms/step - loss: 0.1608 - mae: 0.3224

8/8 [==============================] - 0s 29ms/step - loss: 0.1608 - mae: 0.3224 - val_loss: 0.0107 - val_mae: 0.0755
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1487 - mae: 0.2983
8/8 [==============================] - 0s 2ms/step - loss: 0.1216 - mae: 0.2770

8/8 [==============================] - 0s 27ms/step - loss: 0.1216 - mae: 0.2770 - val_loss: 0.0106 - val_mae: 0.0754
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1192 - mae: 0.2684
8/8 [==============================] - 0s 2ms/step - loss: 0.1314 - mae: 0.2833

8/8 [==============================] - 0s 26ms/step - loss: 0.1314 - mae: 0.2833 - val_loss: 0.0106 - val_mae: 0.0753
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1348 - mae: 0.2951
8/8 [==============================] - 0s 3ms/step - loss: 0.1408 - mae: 0.3035

8/8 [==============================] - 0s 27ms/step - loss: 0.1408 - mae: 0.3035 - val_loss: 0.0106 - val_mae: 0.0755
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1294 - mae: 0.2844
8/8 [==============================] - 0s 3ms/step - loss: 0.1566 - mae: 0.3152

8/8 [==============================] - 0s 35ms/step - loss: 0.1566 - mae: 0.3152 - val_loss: 0.0105 - val_mae: 0.0750
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1241 - mae: 0.2857
8/8 [==============================] - 0s 3ms/step - loss: 0.1372 - mae: 0.2987

8/8 [==============================] - 0s 33ms/step - loss: 0.1372 - mae: 0.2987 - val_loss: 0.0105 - val_mae: 0.0748
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1557 - mae: 0.3083
8/8 [==============================] - 0s 2ms/step - loss: 0.1419 - mae: 0.2918

8/8 [==============================] - 0s 27ms/step - loss: 0.1419 - mae: 0.2918 - val_loss: 0.0104 - val_mae: 0.0747
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1802 - mae: 0.3419
8/8 [==============================] - 0s 3ms/step - loss: 0.1590 - mae: 0.3182

8/8 [==============================] - 0s 26ms/step - loss: 0.1590 - mae: 0.3182 - val_loss: 0.0104 - val_mae: 0.0745
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1470 - mae: 0.3146
8/8 [==============================] - 0s 3ms/step - loss: 0.1323 - mae: 0.2933

8/8 [==============================] - 0s 26ms/step - loss: 0.1323 - mae: 0.2933 - val_loss: 0.0103 - val_mae: 0.0744
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1213 - mae: 0.2819
8/8 [==============================] - 0s 3ms/step - loss: 0.1298 - mae: 0.2808

8/8 [==============================] - 0s 29ms/step - loss: 0.1298 - mae: 0.2808 - val_loss: 0.0103 - val_mae: 0.0743
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1414 - mae: 0.2911
8/8 [==============================] - 0s 3ms/step - loss: 0.1421 - mae: 0.2991

8/8 [==============================] - 0s 28ms/step - loss: 0.1421 - mae: 0.2991 - val_loss: 0.0103 - val_mae: 0.0742
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1530 - mae: 0.3126
8/8 [==============================] - 0s 3ms/step - loss: 0.1369 - mae: 0.2994

8/8 [==============================] - 0s 29ms/step - loss: 0.1369 - mae: 0.2994 - val_loss: 0.0102 - val_mae: 0.0741
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1343 - mae: 0.2863
8/8 [==============================] - 0s 3ms/step - loss: 0.1308 - mae: 0.2823

8/8 [==============================] - 0s 29ms/step - loss: 0.1308 - mae: 0.2823 - val_loss: 0.0102 - val_mae: 0.0740
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1314 - mae: 0.2824
8/8 [==============================] - 0s 4ms/step - loss: 0.1413 - mae: 0.2969

8/8 [==============================] - 0s 34ms/step - loss: 0.1413 - mae: 0.2969 - val_loss: 0.0102 - val_mae: 0.0739
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1012 - mae: 0.2779
8/8 [==============================] - 0s 2ms/step - loss: 0.1220 - mae: 0.2842

8/8 [==============================] - 0s 29ms/step - loss: 0.1220 - mae: 0.2842 - val_loss: 0.0102 - val_mae: 0.0738
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1742 - mae: 0.3233
8/8 [==============================] - 0s 3ms/step - loss: 0.1504 - mae: 0.3081

8/8 [==============================] - 0s 58ms/step - loss: 0.1504 - mae: 0.3081 - val_loss: 0.0102 - val_mae: 0.0738
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1358 - mae: 0.3024
8/8 [==============================] - 0s 2ms/step - loss: 0.1298 - mae: 0.2886

8/8 [==============================] - 0s 40ms/step - loss: 0.1298 - mae: 0.2886 - val_loss: 0.0103 - val_mae: 0.0741
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1089 - mae: 0.2646
8/8 [==============================] - 0s 2ms/step - loss: 0.1233 - mae: 0.2846

8/8 [==============================] - 0s 27ms/step - loss: 0.1233 - mae: 0.2846 - val_loss: 0.0103 - val_mae: 0.0743
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0951 - mae: 0.2428
8/8 [==============================] - 0s 3ms/step - loss: 0.1339 - mae: 0.2900

8/8 [==============================] - 0s 28ms/step - loss: 0.1339 - mae: 0.2900 - val_loss: 0.0102 - val_mae: 0.0737

Run completed: runs/2022-12-07T02-32-08Z

Training run 5/52 (flags = list(64, 50, 0.01, 50, 30, "relu", "sigmoid", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-32-30Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 2s - loss: 0.7635 - mae: 0.7111
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0025s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.8435 - mae: 0.7503

8/8 [==============================] - 1s 112ms/step - loss: 0.8435 - mae: 0.7503 - val_loss: 0.0133 - val_mae: 0.0799
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6048 - mae: 0.6595
8/8 [==============================] - 0s 3ms/step - loss: 0.6238 - mae: 0.6436

8/8 [==============================] - 0s 30ms/step - loss: 0.6238 - mae: 0.6436 - val_loss: 0.0292 - val_mae: 0.1300
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6635 - mae: 0.6211
8/8 [==============================] - 0s 2ms/step - loss: 0.5786 - mae: 0.5915

8/8 [==============================] - 0s 26ms/step - loss: 0.5786 - mae: 0.5915 - val_loss: 0.0126 - val_mae: 0.0899
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7499 - mae: 0.6932
8/8 [==============================] - 0s 3ms/step - loss: 0.5876 - mae: 0.6204

8/8 [==============================] - 0s 28ms/step - loss: 0.5876 - mae: 0.6204 - val_loss: 0.0110 - val_mae: 0.0777
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2728 - mae: 0.4048
8/8 [==============================] - 0s 3ms/step - loss: 0.3817 - mae: 0.4885

8/8 [==============================] - 0s 28ms/step - loss: 0.3817 - mae: 0.4885 - val_loss: 0.0129 - val_mae: 0.0717
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3380 - mae: 0.4735
8/8 [==============================] - 0s 3ms/step - loss: 0.3741 - mae: 0.4863

8/8 [==============================] - 0s 31ms/step - loss: 0.3741 - mae: 0.4863 - val_loss: 0.0135 - val_mae: 0.1008
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4134 - mae: 0.5338
8/8 [==============================] - 0s 3ms/step - loss: 0.3478 - mae: 0.4815

8/8 [==============================] - 0s 35ms/step - loss: 0.3478 - mae: 0.4815 - val_loss: 0.0141 - val_mae: 0.1042
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3203 - mae: 0.4276
8/8 [==============================] - 0s 3ms/step - loss: 0.3695 - mae: 0.4708

8/8 [==============================] - 0s 29ms/step - loss: 0.3695 - mae: 0.4708 - val_loss: 0.0100 - val_mae: 0.0768
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3065 - mae: 0.4477
8/8 [==============================] - 0s 3ms/step - loss: 0.2839 - mae: 0.4284

8/8 [==============================] - 0s 28ms/step - loss: 0.2839 - mae: 0.4284 - val_loss: 0.0101 - val_mae: 0.0662
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3125 - mae: 0.4421
8/8 [==============================] - 0s 3ms/step - loss: 0.2639 - mae: 0.4102

8/8 [==============================] - 0s 30ms/step - loss: 0.2639 - mae: 0.4102 - val_loss: 0.0107 - val_mae: 0.0664
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2066 - mae: 0.3772
8/8 [==============================] - 0s 2ms/step - loss: 0.2449 - mae: 0.3963

8/8 [==============================] - 0s 28ms/step - loss: 0.2449 - mae: 0.3963 - val_loss: 0.0094 - val_mae: 0.0689
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3048 - mae: 0.4586
8/8 [==============================] - 0s 2ms/step - loss: 0.2164 - mae: 0.3682

8/8 [==============================] - 0s 26ms/step - loss: 0.2164 - mae: 0.3682 - val_loss: 0.0094 - val_mae: 0.0711
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2176 - mae: 0.3735
8/8 [==============================] - 0s 3ms/step - loss: 0.1785 - mae: 0.3406

8/8 [==============================] - 0s 29ms/step - loss: 0.1785 - mae: 0.3406 - val_loss: 0.0093 - val_mae: 0.0671
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1736 - mae: 0.3382
8/8 [==============================] - 0s 3ms/step - loss: 0.1561 - mae: 0.3134

8/8 [==============================] - 0s 35ms/step - loss: 0.1561 - mae: 0.3134 - val_loss: 0.0094 - val_mae: 0.0714
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1487 - mae: 0.3061
8/8 [==============================] - 0s 3ms/step - loss: 0.1409 - mae: 0.2999

8/8 [==============================] - 0s 33ms/step - loss: 0.1409 - mae: 0.2999 - val_loss: 0.0105 - val_mae: 0.0836
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1569 - mae: 0.3140
8/8 [==============================] - 0s 2ms/step - loss: 0.1535 - mae: 0.3130

8/8 [==============================] - 0s 27ms/step - loss: 0.1535 - mae: 0.3130 - val_loss: 0.0098 - val_mae: 0.0760
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0965 - mae: 0.2367
8/8 [==============================] - 0s 2ms/step - loss: 0.1165 - mae: 0.2740

8/8 [==============================] - 0s 27ms/step - loss: 0.1165 - mae: 0.2740 - val_loss: 0.0102 - val_mae: 0.0794
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1221 - mae: 0.2866
8/8 [==============================] - 0s 3ms/step - loss: 0.1195 - mae: 0.2837

8/8 [==============================] - 0s 29ms/step - loss: 0.1195 - mae: 0.2837 - val_loss: 0.0100 - val_mae: 0.0728
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0960 - mae: 0.2599
8/8 [==============================] - 0s 3ms/step - loss: 0.1004 - mae: 0.2536

8/8 [==============================] - 0s 29ms/step - loss: 0.1004 - mae: 0.2536 - val_loss: 0.0122 - val_mae: 0.0921
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1095 - mae: 0.2757
8/8 [==============================] - 0s 3ms/step - loss: 0.1080 - mae: 0.2617

8/8 [==============================] - 0s 27ms/step - loss: 0.1080 - mae: 0.2617 - val_loss: 0.0101 - val_mae: 0.0758
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0966 - mae: 0.2210
8/8 [==============================] - 0s 2ms/step - loss: 0.0889 - mae: 0.2394

8/8 [==============================] - 0s 29ms/step - loss: 0.0889 - mae: 0.2394 - val_loss: 0.0103 - val_mae: 0.0750
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0871 - mae: 0.2436
8/8 [==============================] - 0s 2ms/step - loss: 0.0815 - mae: 0.2262

8/8 [==============================] - 0s 26ms/step - loss: 0.0815 - mae: 0.2262 - val_loss: 0.0103 - val_mae: 0.0745
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0781 - mae: 0.2346
8/8 [==============================] - 0s 2ms/step - loss: 0.0741 - mae: 0.2162

8/8 [==============================] - 0s 27ms/step - loss: 0.0741 - mae: 0.2162 - val_loss: 0.0110 - val_mae: 0.0816
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0935 - mae: 0.2515
8/8 [==============================] - 0s 3ms/step - loss: 0.0705 - mae: 0.2169

8/8 [==============================] - 0s 28ms/step - loss: 0.0705 - mae: 0.2169 - val_loss: 0.0106 - val_mae: 0.0770
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0638 - mae: 0.1951
8/8 [==============================] - 0s 3ms/step - loss: 0.0590 - mae: 0.1938

8/8 [==============================] - 0s 28ms/step - loss: 0.0590 - mae: 0.1938 - val_loss: 0.0107 - val_mae: 0.0764
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0756 - mae: 0.2231
8/8 [==============================] - 0s 3ms/step - loss: 0.0554 - mae: 0.1891

8/8 [==============================] - 0s 27ms/step - loss: 0.0554 - mae: 0.1891 - val_loss: 0.0113 - val_mae: 0.0823
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0563 - mae: 0.1850
8/8 [==============================] - 0s 3ms/step - loss: 0.0527 - mae: 0.1854

8/8 [==============================] - 0s 27ms/step - loss: 0.0527 - mae: 0.1854 - val_loss: 0.0108 - val_mae: 0.0771
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0676 - mae: 0.2132
8/8 [==============================] - 0s 2ms/step - loss: 0.0566 - mae: 0.1918

8/8 [==============================] - 0s 27ms/step - loss: 0.0566 - mae: 0.1918 - val_loss: 0.0109 - val_mae: 0.0775
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0476 - mae: 0.1687
8/8 [==============================] - 0s 2ms/step - loss: 0.0449 - mae: 0.1669

8/8 [==============================] - 0s 26ms/step - loss: 0.0449 - mae: 0.1669 - val_loss: 0.0110 - val_mae: 0.0782
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0550 - mae: 0.1922
8/8 [==============================] - 0s 2ms/step - loss: 0.0448 - mae: 0.1708

8/8 [==============================] - 0s 27ms/step - loss: 0.0448 - mae: 0.1708 - val_loss: 0.0118 - val_mae: 0.0855

Run completed: runs/2022-12-07T02-32-30Z

Training run 6/52 (flags = list(64, 50, 0.01, 30, 50, "relu", "tanh", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-32-48Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 5s - loss: 1.3172 - mae: 0.9813
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0010s vs `on_train_batch_end` time: 0.0042s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.3768 - mae: 0.4789

13/13 [==============================] - 1s 64ms/step - loss: 0.3768 - mae: 0.4789 - val_loss: 0.0516 - val_mae: 0.1730
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1652 - mae: 0.2832
13/13 [==============================] - 0s 2ms/step - loss: 0.1679 - mae: 0.3099

13/13 [==============================] - 0s 17ms/step - loss: 0.1679 - mae: 0.3099 - val_loss: 0.0299 - val_mae: 0.1392
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1592 - mae: 0.3184
13/13 [==============================] - 0s 3ms/step - loss: 0.1138 - mae: 0.2620

13/13 [==============================] - 0s 18ms/step - loss: 0.1138 - mae: 0.2620 - val_loss: 0.0220 - val_mae: 0.1222
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0826 - mae: 0.2383
13/13 [==============================] - 0s 2ms/step - loss: 0.1031 - mae: 0.2512

13/13 [==============================] - 0s 17ms/step - loss: 0.1031 - mae: 0.2512 - val_loss: 0.0171 - val_mae: 0.1079
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0885 - mae: 0.2466
13/13 [==============================] - 0s 4ms/step - loss: 0.0942 - mae: 0.2448

13/13 [==============================] - 0s 23ms/step - loss: 0.0942 - mae: 0.2448 - val_loss: 0.0163 - val_mae: 0.1061
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0836 - mae: 0.2146
13/13 [==============================] - 0s 3ms/step - loss: 0.1045 - mae: 0.2538

13/13 [==============================] - 0s 19ms/step - loss: 0.1045 - mae: 0.2538 - val_loss: 0.0118 - val_mae: 0.0896
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0724 - mae: 0.2107
13/13 [==============================] - 0s 2ms/step - loss: 0.0872 - mae: 0.2270

13/13 [==============================] - 0s 17ms/step - loss: 0.0872 - mae: 0.2270 - val_loss: 0.0146 - val_mae: 0.1046
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0777 - mae: 0.2241
13/13 [==============================] - 0s 2ms/step - loss: 0.0818 - mae: 0.2243

13/13 [==============================] - 0s 18ms/step - loss: 0.0818 - mae: 0.2243 - val_loss: 0.0113 - val_mae: 0.0889
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1144 - mae: 0.2687
13/13 [==============================] - 0s 2ms/step - loss: 0.0779 - mae: 0.2144

13/13 [==============================] - 0s 17ms/step - loss: 0.0779 - mae: 0.2144 - val_loss: 0.0090 - val_mae: 0.0789
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1058 - mae: 0.2366
13/13 [==============================] - 0s 2ms/step - loss: 0.0757 - mae: 0.2110

13/13 [==============================] - 0s 17ms/step - loss: 0.0757 - mae: 0.2110 - val_loss: 0.0087 - val_mae: 0.0809
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1046 - mae: 0.2385
13/13 [==============================] - 0s 2ms/step - loss: 0.0691 - mae: 0.2048

13/13 [==============================] - 0s 17ms/step - loss: 0.0691 - mae: 0.2048 - val_loss: 0.0083 - val_mae: 0.0750
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0589 - mae: 0.2097
13/13 [==============================] - 0s 3ms/step - loss: 0.0743 - mae: 0.2135

13/13 [==============================] - 0s 21ms/step - loss: 0.0743 - mae: 0.2135 - val_loss: 0.0070 - val_mae: 0.0699
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0723 - mae: 0.2071
13/13 [==============================] - 0s 3ms/step - loss: 0.0654 - mae: 0.2007

13/13 [==============================] - 0s 21ms/step - loss: 0.0654 - mae: 0.2007 - val_loss: 0.0084 - val_mae: 0.0687
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0593 - mae: 0.1935
13/13 [==============================] - 0s 3ms/step - loss: 0.0683 - mae: 0.2037

13/13 [==============================] - 0s 17ms/step - loss: 0.0683 - mae: 0.2037 - val_loss: 0.0063 - val_mae: 0.0602
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0388 - mae: 0.1746
13/13 [==============================] - 0s 2ms/step - loss: 0.0550 - mae: 0.1843

13/13 [==============================] - 0s 18ms/step - loss: 0.0550 - mae: 0.1843 - val_loss: 0.0068 - val_mae: 0.0713
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0661 - mae: 0.1966
13/13 [==============================] - 0s 2ms/step - loss: 0.0587 - mae: 0.1906

13/13 [==============================] - 0s 16ms/step - loss: 0.0587 - mae: 0.1906 - val_loss: 0.0087 - val_mae: 0.0831
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0602 - mae: 0.2000
13/13 [==============================] - 0s 3ms/step - loss: 0.0513 - mae: 0.1830

13/13 [==============================] - 0s 18ms/step - loss: 0.0513 - mae: 0.1830 - val_loss: 0.0082 - val_mae: 0.0756
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0665 - mae: 0.1905
13/13 [==============================] - 0s 2ms/step - loss: 0.0540 - mae: 0.1785

13/13 [==============================] - 0s 18ms/step - loss: 0.0540 - mae: 0.1785 - val_loss: 0.0052 - val_mae: 0.0552
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0606 - mae: 0.1832
13/13 [==============================] - 0s 2ms/step - loss: 0.0510 - mae: 0.1745

13/13 [==============================] - 0s 17ms/step - loss: 0.0510 - mae: 0.1745 - val_loss: 0.0053 - val_mae: 0.0582
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0503 - mae: 0.1726
13/13 [==============================] - 0s 2ms/step - loss: 0.0471 - mae: 0.1706

13/13 [==============================] - 0s 17ms/step - loss: 0.0471 - mae: 0.1706 - val_loss: 0.0068 - val_mae: 0.0718
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0256 - mae: 0.1198
13/13 [==============================] - 0s 2ms/step - loss: 0.0445 - mae: 0.1671

13/13 [==============================] - 0s 17ms/step - loss: 0.0445 - mae: 0.1671 - val_loss: 0.0057 - val_mae: 0.0648
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0692 - mae: 0.2035
13/13 [==============================] - 0s 2ms/step - loss: 0.0496 - mae: 0.1736

13/13 [==============================] - 0s 18ms/step - loss: 0.0496 - mae: 0.1736 - val_loss: 0.0054 - val_mae: 0.0603
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0442 - mae: 0.1729
13/13 [==============================] - 0s 2ms/step - loss: 0.0408 - mae: 0.1599

13/13 [==============================] - 0s 17ms/step - loss: 0.0408 - mae: 0.1599 - val_loss: 0.0051 - val_mae: 0.0595
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0198 - mae: 0.1126
13/13 [==============================] - 0s 3ms/step - loss: 0.0429 - mae: 0.1637

13/13 [==============================] - 0s 18ms/step - loss: 0.0429 - mae: 0.1637 - val_loss: 0.0048 - val_mae: 0.0549
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0594 - mae: 0.1813
13/13 [==============================] - 0s 2ms/step - loss: 0.0458 - mae: 0.1625

13/13 [==============================] - 0s 17ms/step - loss: 0.0458 - mae: 0.1625 - val_loss: 0.0068 - val_mae: 0.0679
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0446 - mae: 0.1714
13/13 [==============================] - 0s 3ms/step - loss: 0.0408 - mae: 0.1570

13/13 [==============================] - 0s 17ms/step - loss: 0.0408 - mae: 0.1570 - val_loss: 0.0046 - val_mae: 0.0530
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0373 - mae: 0.1658
13/13 [==============================] - 0s 3ms/step - loss: 0.0405 - mae: 0.1563

13/13 [==============================] - 0s 18ms/step - loss: 0.0405 - mae: 0.1563 - val_loss: 0.0051 - val_mae: 0.0598
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0443 - mae: 0.1840
13/13 [==============================] - 0s 3ms/step - loss: 0.0392 - mae: 0.1539

13/13 [==============================] - 0s 17ms/step - loss: 0.0392 - mae: 0.1539 - val_loss: 0.0070 - val_mae: 0.0678
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0514 - mae: 0.1903
13/13 [==============================] - 0s 3ms/step - loss: 0.0380 - mae: 0.1524

13/13 [==============================] - 0s 17ms/step - loss: 0.0380 - mae: 0.1524 - val_loss: 0.0051 - val_mae: 0.0551
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0366 - mae: 0.1439
13/13 [==============================] - 0s 2ms/step - loss: 0.0347 - mae: 0.1472

13/13 [==============================] - 0s 17ms/step - loss: 0.0347 - mae: 0.1472 - val_loss: 0.0047 - val_mae: 0.0502
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0203 - mae: 0.1206
13/13 [==============================] - 0s 3ms/step - loss: 0.0287 - mae: 0.1369

13/13 [==============================] - 0s 17ms/step - loss: 0.0287 - mae: 0.1369 - val_loss: 0.0045 - val_mae: 0.0490
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0435 - mae: 0.1647
13/13 [==============================] - 0s 2ms/step - loss: 0.0370 - mae: 0.1496

13/13 [==============================] - 0s 17ms/step - loss: 0.0370 - mae: 0.1496 - val_loss: 0.0043 - val_mae: 0.0485
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0434 - mae: 0.1640
13/13 [==============================] - 0s 2ms/step - loss: 0.0368 - mae: 0.1512

13/13 [==============================] - 0s 17ms/step - loss: 0.0368 - mae: 0.1512 - val_loss: 0.0043 - val_mae: 0.0499
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0301 - mae: 0.1327
13/13 [==============================] - 0s 3ms/step - loss: 0.0317 - mae: 0.1395

13/13 [==============================] - 0s 17ms/step - loss: 0.0317 - mae: 0.1395 - val_loss: 0.0045 - val_mae: 0.0530
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0458 - mae: 0.1691
13/13 [==============================] - 0s 2ms/step - loss: 0.0353 - mae: 0.1466

13/13 [==============================] - 0s 17ms/step - loss: 0.0353 - mae: 0.1466 - val_loss: 0.0042 - val_mae: 0.0476
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0247 - mae: 0.1369
13/13 [==============================] - 0s 2ms/step - loss: 0.0325 - mae: 0.1406

13/13 [==============================] - 0s 17ms/step - loss: 0.0325 - mae: 0.1406 - val_loss: 0.0043 - val_mae: 0.0483
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0477 - mae: 0.1640
13/13 [==============================] - 0s 2ms/step - loss: 0.0337 - mae: 0.1416

13/13 [==============================] - 0s 17ms/step - loss: 0.0337 - mae: 0.1416 - val_loss: 0.0045 - val_mae: 0.0529
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0279 - mae: 0.1224
13/13 [==============================] - 0s 3ms/step - loss: 0.0326 - mae: 0.1367

13/13 [==============================] - 0s 17ms/step - loss: 0.0326 - mae: 0.1367 - val_loss: 0.0045 - val_mae: 0.0519
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0244 - mae: 0.1324
13/13 [==============================] - 0s 3ms/step - loss: 0.0331 - mae: 0.1430

13/13 [==============================] - 0s 18ms/step - loss: 0.0331 - mae: 0.1430 - val_loss: 0.0044 - val_mae: 0.0476
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0400 - mae: 0.1444
13/13 [==============================] - 0s 3ms/step - loss: 0.0321 - mae: 0.1408

13/13 [==============================] - 0s 18ms/step - loss: 0.0321 - mae: 0.1408 - val_loss: 0.0042 - val_mae: 0.0460
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0418 - mae: 0.1579
13/13 [==============================] - 0s 2ms/step - loss: 0.0288 - mae: 0.1329

13/13 [==============================] - 0s 17ms/step - loss: 0.0288 - mae: 0.1329 - val_loss: 0.0041 - val_mae: 0.0480
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0266 - mae: 0.1359
13/13 [==============================] - 0s 2ms/step - loss: 0.0291 - mae: 0.1378

13/13 [==============================] - 0s 18ms/step - loss: 0.0291 - mae: 0.1378 - val_loss: 0.0042 - val_mae: 0.0486
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0200 - mae: 0.1125
13/13 [==============================] - 0s 2ms/step - loss: 0.0264 - mae: 0.1278

13/13 [==============================] - 0s 17ms/step - loss: 0.0264 - mae: 0.1278 - val_loss: 0.0042 - val_mae: 0.0498
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0269 - mae: 0.1324
13/13 [==============================] - 0s 3ms/step - loss: 0.0287 - mae: 0.1329

13/13 [==============================] - 0s 18ms/step - loss: 0.0287 - mae: 0.1329 - val_loss: 0.0045 - val_mae: 0.0556
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0200 - mae: 0.1155
13/13 [==============================] - 0s 3ms/step - loss: 0.0275 - mae: 0.1299

13/13 [==============================] - 0s 17ms/step - loss: 0.0275 - mae: 0.1299 - val_loss: 0.0040 - val_mae: 0.0465
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0513 - mae: 0.1782
13/13 [==============================] - 0s 2ms/step - loss: 0.0295 - mae: 0.1338

13/13 [==============================] - 0s 17ms/step - loss: 0.0295 - mae: 0.1338 - val_loss: 0.0048 - val_mae: 0.0563
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0194 - mae: 0.1013
13/13 [==============================] - 0s 2ms/step - loss: 0.0283 - mae: 0.1314

13/13 [==============================] - 0s 17ms/step - loss: 0.0283 - mae: 0.1314 - val_loss: 0.0062 - val_mae: 0.0649
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0405 - mae: 0.1599
13/13 [==============================] - 0s 3ms/step - loss: 0.0269 - mae: 0.1266

13/13 [==============================] - 0s 17ms/step - loss: 0.0269 - mae: 0.1266 - val_loss: 0.0040 - val_mae: 0.0479
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0281 - mae: 0.1438
13/13 [==============================] - 0s 2ms/step - loss: 0.0262 - mae: 0.1282

13/13 [==============================] - 0s 17ms/step - loss: 0.0262 - mae: 0.1282 - val_loss: 0.0040 - val_mae: 0.0497
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0454 - mae: 0.1692
13/13 [==============================] - 0s 3ms/step - loss: 0.0251 - mae: 0.1244

13/13 [==============================] - 0s 18ms/step - loss: 0.0251 - mae: 0.1244 - val_loss: 0.0044 - val_mae: 0.0528

Run completed: runs/2022-12-07T02-32-48Z

Training run 7/52 (flags = list(16, 10, 0.01, 30, 50, "tanh", "sigmoid", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-33-12Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 5s - loss: 0.6290 - mae: 0.6314
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0018s vs `on_train_batch_end` time: 0.0027s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.6292 - mae: 0.6288

13/13 [==============================] - 1s 63ms/step - loss: 0.6292 - mae: 0.6288 - val_loss: 0.0123 - val_mae: 0.0920
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3386 - mae: 0.4857
13/13 [==============================] - 0s 2ms/step - loss: 0.4444 - mae: 0.5336

13/13 [==============================] - 0s 17ms/step - loss: 0.4444 - mae: 0.5336 - val_loss: 0.0191 - val_mae: 0.1187
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3617 - mae: 0.4774
13/13 [==============================] - 0s 3ms/step - loss: 0.3550 - mae: 0.4750

13/13 [==============================] - 0s 25ms/step - loss: 0.3550 - mae: 0.4750 - val_loss: 0.0189 - val_mae: 0.1188
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3669 - mae: 0.5039
13/13 [==============================] - 0s 3ms/step - loss: 0.3104 - mae: 0.4550

13/13 [==============================] - 0s 19ms/step - loss: 0.3104 - mae: 0.4550 - val_loss: 0.0135 - val_mae: 0.0982
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2232 - mae: 0.3983
13/13 [==============================] - 0s 2ms/step - loss: 0.2659 - mae: 0.4176

13/13 [==============================] - 0s 17ms/step - loss: 0.2659 - mae: 0.4176 - val_loss: 0.0255 - val_mae: 0.1378
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2317 - mae: 0.3934
13/13 [==============================] - 0s 3ms/step - loss: 0.2157 - mae: 0.3721

13/13 [==============================] - 0s 18ms/step - loss: 0.2157 - mae: 0.3721 - val_loss: 0.0250 - val_mae: 0.1353
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2408 - mae: 0.4048
13/13 [==============================] - 0s 3ms/step - loss: 0.1742 - mae: 0.3322

13/13 [==============================] - 0s 22ms/step - loss: 0.1742 - mae: 0.3322 - val_loss: 0.0198 - val_mae: 0.1198
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2665 - mae: 0.4091
13/13 [==============================] - 0s 3ms/step - loss: 0.1730 - mae: 0.3375

13/13 [==============================] - 0s 22ms/step - loss: 0.1730 - mae: 0.3375 - val_loss: 0.0162 - val_mae: 0.1062
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1634 - mae: 0.3481
13/13 [==============================] - 0s 2ms/step - loss: 0.1431 - mae: 0.3117

13/13 [==============================] - 0s 17ms/step - loss: 0.1431 - mae: 0.3117 - val_loss: 0.0170 - val_mae: 0.1078
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1010 - mae: 0.2542
13/13 [==============================] - 0s 2ms/step - loss: 0.0999 - mae: 0.2508

13/13 [==============================] - 0s 18ms/step - loss: 0.0999 - mae: 0.2508 - val_loss: 0.0211 - val_mae: 0.1214
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0827 - mae: 0.2303
13/13 [==============================] - 0s 3ms/step - loss: 0.1045 - mae: 0.2636

13/13 [==============================] - 0s 18ms/step - loss: 0.1045 - mae: 0.2636 - val_loss: 0.0193 - val_mae: 0.1147
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0760 - mae: 0.2324
13/13 [==============================] - 0s 3ms/step - loss: 0.0948 - mae: 0.2536

13/13 [==============================] - 0s 18ms/step - loss: 0.0948 - mae: 0.2536 - val_loss: 0.0183 - val_mae: 0.1104
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0570 - mae: 0.1969
13/13 [==============================] - 0s 3ms/step - loss: 0.0774 - mae: 0.2235

13/13 [==============================] - 0s 17ms/step - loss: 0.0774 - mae: 0.2235 - val_loss: 0.0180 - val_mae: 0.1086
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0734 - mae: 0.2171
13/13 [==============================] - 0s 2ms/step - loss: 0.0760 - mae: 0.2246

13/13 [==============================] - 0s 18ms/step - loss: 0.0760 - mae: 0.2246 - val_loss: 0.0181 - val_mae: 0.1081
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0570 - mae: 0.2102
13/13 [==============================] - 0s 2ms/step - loss: 0.0586 - mae: 0.1950

13/13 [==============================] - 0s 16ms/step - loss: 0.0586 - mae: 0.1950 - val_loss: 0.0205 - val_mae: 0.1163
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0777 - mae: 0.2048
13/13 [==============================] - 0s 2ms/step - loss: 0.0652 - mae: 0.2037

13/13 [==============================] - 0s 17ms/step - loss: 0.0652 - mae: 0.2037 - val_loss: 0.0172 - val_mae: 0.1057
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0423 - mae: 0.1709
13/13 [==============================] - 0s 2ms/step - loss: 0.0580 - mae: 0.1902

13/13 [==============================] - 0s 18ms/step - loss: 0.0580 - mae: 0.1902 - val_loss: 0.0164 - val_mae: 0.1032
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0488 - mae: 0.1796
13/13 [==============================] - 0s 2ms/step - loss: 0.0559 - mae: 0.1920

13/13 [==============================] - 0s 17ms/step - loss: 0.0559 - mae: 0.1920 - val_loss: 0.0173 - val_mae: 0.1057
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0664 - mae: 0.2069
13/13 [==============================] - 0s 2ms/step - loss: 0.0489 - mae: 0.1769

13/13 [==============================] - 0s 17ms/step - loss: 0.0489 - mae: 0.1769 - val_loss: 0.0176 - val_mae: 0.1067
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0446 - mae: 0.1646
13/13 [==============================] - 0s 2ms/step - loss: 0.0498 - mae: 0.1767

13/13 [==============================] - 0s 17ms/step - loss: 0.0498 - mae: 0.1767 - val_loss: 0.0171 - val_mae: 0.1053
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0535 - mae: 0.1920
13/13 [==============================] - 0s 3ms/step - loss: 0.0409 - mae: 0.1642

13/13 [==============================] - 0s 17ms/step - loss: 0.0409 - mae: 0.1642 - val_loss: 0.0167 - val_mae: 0.1040
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0376 - mae: 0.1697
13/13 [==============================] - 0s 2ms/step - loss: 0.0391 - mae: 0.1625

13/13 [==============================] - 0s 18ms/step - loss: 0.0391 - mae: 0.1625 - val_loss: 0.0170 - val_mae: 0.1046
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0444 - mae: 0.1835
13/13 [==============================] - 0s 2ms/step - loss: 0.0376 - mae: 0.1576

13/13 [==============================] - 0s 18ms/step - loss: 0.0376 - mae: 0.1576 - val_loss: 0.0195 - val_mae: 0.1115
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0421 - mae: 0.1598
13/13 [==============================] - 0s 2ms/step - loss: 0.0374 - mae: 0.1534

13/13 [==============================] - 0s 18ms/step - loss: 0.0374 - mae: 0.1534 - val_loss: 0.0179 - val_mae: 0.1068
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0377 - mae: 0.1575
13/13 [==============================] - 0s 2ms/step - loss: 0.0365 - mae: 0.1554

13/13 [==============================] - 0s 17ms/step - loss: 0.0365 - mae: 0.1554 - val_loss: 0.0176 - val_mae: 0.1063
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0299 - mae: 0.1471
13/13 [==============================] - 0s 2ms/step - loss: 0.0313 - mae: 0.1428

13/13 [==============================] - 0s 19ms/step - loss: 0.0313 - mae: 0.1428 - val_loss: 0.0170 - val_mae: 0.1045
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0421 - mae: 0.1826
13/13 [==============================] - 0s 2ms/step - loss: 0.0318 - mae: 0.1443

13/13 [==============================] - 0s 17ms/step - loss: 0.0318 - mae: 0.1443 - val_loss: 0.0162 - val_mae: 0.1019
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0246 - mae: 0.1310
13/13 [==============================] - 0s 2ms/step - loss: 0.0320 - mae: 0.1479

13/13 [==============================] - 0s 17ms/step - loss: 0.0320 - mae: 0.1479 - val_loss: 0.0168 - val_mae: 0.1037
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0383 - mae: 0.1630
13/13 [==============================] - 0s 3ms/step - loss: 0.0313 - mae: 0.1400

13/13 [==============================] - 0s 17ms/step - loss: 0.0313 - mae: 0.1400 - val_loss: 0.0175 - val_mae: 0.1060
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0372 - mae: 0.1441
13/13 [==============================] - 0s 2ms/step - loss: 0.0325 - mae: 0.1443

13/13 [==============================] - 0s 18ms/step - loss: 0.0325 - mae: 0.1443 - val_loss: 0.0169 - val_mae: 0.1040
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0170 - mae: 0.1009
13/13 [==============================] - 0s 3ms/step - loss: 0.0285 - mae: 0.1371

13/13 [==============================] - 0s 18ms/step - loss: 0.0285 - mae: 0.1371 - val_loss: 0.0169 - val_mae: 0.1041
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0283 - mae: 0.1421
13/13 [==============================] - 0s 2ms/step - loss: 0.0252 - mae: 0.1277

13/13 [==============================] - 0s 18ms/step - loss: 0.0252 - mae: 0.1277 - val_loss: 0.0167 - val_mae: 0.1032
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0314 - mae: 0.1333
13/13 [==============================] - 0s 3ms/step - loss: 0.0278 - mae: 0.1323

13/13 [==============================] - 0s 19ms/step - loss: 0.0278 - mae: 0.1323 - val_loss: 0.0173 - val_mae: 0.1051
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0242 - mae: 0.1267
13/13 [==============================] - 0s 2ms/step - loss: 0.0276 - mae: 0.1334

13/13 [==============================] - 0s 18ms/step - loss: 0.0276 - mae: 0.1334 - val_loss: 0.0163 - val_mae: 0.1017
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0260 - mae: 0.1383
13/13 [==============================] - 0s 3ms/step - loss: 0.0235 - mae: 0.1270

13/13 [==============================] - 0s 18ms/step - loss: 0.0235 - mae: 0.1270 - val_loss: 0.0156 - val_mae: 0.0991
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0270 - mae: 0.1331
13/13 [==============================] - 0s 2ms/step - loss: 0.0264 - mae: 0.1305

13/13 [==============================] - 0s 18ms/step - loss: 0.0264 - mae: 0.1305 - val_loss: 0.0159 - val_mae: 0.1003
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0221 - mae: 0.1211
13/13 [==============================] - 0s 2ms/step - loss: 0.0249 - mae: 0.1254

13/13 [==============================] - 0s 18ms/step - loss: 0.0249 - mae: 0.1254 - val_loss: 0.0159 - val_mae: 0.1005
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0208 - mae: 0.1122
13/13 [==============================] - 0s 2ms/step - loss: 0.0252 - mae: 0.1276

13/13 [==============================] - 0s 17ms/step - loss: 0.0252 - mae: 0.1276 - val_loss: 0.0164 - val_mae: 0.1021
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0193 - mae: 0.1168
13/13 [==============================] - 0s 3ms/step - loss: 0.0232 - mae: 0.1241

13/13 [==============================] - 0s 17ms/step - loss: 0.0232 - mae: 0.1241 - val_loss: 0.0162 - val_mae: 0.1013
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0192 - mae: 0.1172
13/13 [==============================] - 0s 2ms/step - loss: 0.0225 - mae: 0.1196

13/13 [==============================] - 0s 17ms/step - loss: 0.0225 - mae: 0.1196 - val_loss: 0.0159 - val_mae: 0.1002
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0168 - mae: 0.1118
13/13 [==============================] - 0s 2ms/step - loss: 0.0236 - mae: 0.1250

13/13 [==============================] - 0s 18ms/step - loss: 0.0236 - mae: 0.1250 - val_loss: 0.0169 - val_mae: 0.1036
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0299 - mae: 0.1315
13/13 [==============================] - 0s 2ms/step - loss: 0.0239 - mae: 0.1260

13/13 [==============================] - 0s 18ms/step - loss: 0.0239 - mae: 0.1260 - val_loss: 0.0167 - val_mae: 0.1029
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0215 - mae: 0.1182
13/13 [==============================] - 0s 3ms/step - loss: 0.0236 - mae: 0.1257

13/13 [==============================] - 0s 18ms/step - loss: 0.0236 - mae: 0.1257 - val_loss: 0.0160 - val_mae: 0.1003
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0284 - mae: 0.1358
13/13 [==============================] - 0s 3ms/step - loss: 0.0235 - mae: 0.1236

13/13 [==============================] - 0s 18ms/step - loss: 0.0235 - mae: 0.1236 - val_loss: 0.0166 - val_mae: 0.1025
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0211 - mae: 0.1068
13/13 [==============================] - 0s 2ms/step - loss: 0.0218 - mae: 0.1185

13/13 [==============================] - 0s 18ms/step - loss: 0.0218 - mae: 0.1185 - val_loss: 0.0162 - val_mae: 0.1013
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0222 - mae: 0.1194
13/13 [==============================] - 0s 2ms/step - loss: 0.0198 - mae: 0.1132

13/13 [==============================] - 0s 17ms/step - loss: 0.0198 - mae: 0.1132 - val_loss: 0.0153 - val_mae: 0.0979
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0305 - mae: 0.1441
13/13 [==============================] - 0s 3ms/step - loss: 0.0214 - mae: 0.1163

13/13 [==============================] - 0s 17ms/step - loss: 0.0214 - mae: 0.1163 - val_loss: 0.0152 - val_mae: 0.0972
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0185 - mae: 0.1075
13/13 [==============================] - 0s 2ms/step - loss: 0.0212 - mae: 0.1176

13/13 [==============================] - 0s 18ms/step - loss: 0.0212 - mae: 0.1176 - val_loss: 0.0153 - val_mae: 0.0976
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0199 - mae: 0.1157
13/13 [==============================] - 0s 3ms/step - loss: 0.0202 - mae: 0.1157

13/13 [==============================] - 0s 18ms/step - loss: 0.0202 - mae: 0.1157 - val_loss: 0.0153 - val_mae: 0.0975
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0210 - mae: 0.1161
13/13 [==============================] - 0s 2ms/step - loss: 0.0205 - mae: 0.1159

13/13 [==============================] - 0s 17ms/step - loss: 0.0205 - mae: 0.1159 - val_loss: 0.0152 - val_mae: 0.0972

Run completed: runs/2022-12-07T02-33-12Z

Training run 8/52 (flags = list(64, 10, 0.001, 30, 50, "relu", "sigmoid", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-33-36Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 7s - loss: 1.5602 - mae: 1.2122
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0012s vs `on_train_batch_end` time: 0.0026s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 1.3538 - mae: 1.1252

13/13 [==============================] - 1s 67ms/step - loss: 1.3538 - mae: 1.1252 - val_loss: 1.2041 - val_mae: 1.0808
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 1.2843 - mae: 1.0929
13/13 [==============================] - 0s 3ms/step - loss: 1.1664 - mae: 1.0348

13/13 [==============================] - 0s 18ms/step - loss: 1.1664 - mae: 1.0348 - val_loss: 0.9942 - val_mae: 0.9794
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0641 - mae: 1.0014
13/13 [==============================] - 0s 2ms/step - loss: 0.9953 - mae: 0.9506

13/13 [==============================] - 0s 18ms/step - loss: 0.9953 - mae: 0.9506 - val_loss: 0.8292 - val_mae: 0.8915
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.9834 - mae: 0.9564
13/13 [==============================] - 0s 3ms/step - loss: 0.8278 - mae: 0.8609

13/13 [==============================] - 0s 18ms/step - loss: 0.8278 - mae: 0.8609 - val_loss: 0.6849 - val_mae: 0.8070
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.8248 - mae: 0.8530
13/13 [==============================] - 0s 3ms/step - loss: 0.6823 - mae: 0.7715

13/13 [==============================] - 0s 18ms/step - loss: 0.6823 - mae: 0.7715 - val_loss: 0.5689 - val_mae: 0.7322
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6191 - mae: 0.7293
13/13 [==============================] - 0s 4ms/step - loss: 0.5828 - mae: 0.7097

13/13 [==============================] - 0s 28ms/step - loss: 0.5828 - mae: 0.7097 - val_loss: 0.4709 - val_mae: 0.6624
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4275 - mae: 0.6063
13/13 [==============================] - 0s 2ms/step - loss: 0.4876 - mae: 0.6421

13/13 [==============================] - 0s 17ms/step - loss: 0.4876 - mae: 0.6421 - val_loss: 0.3946 - val_mae: 0.6025
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5118 - mae: 0.6591
13/13 [==============================] - 0s 3ms/step - loss: 0.4039 - mae: 0.5723

13/13 [==============================] - 0s 18ms/step - loss: 0.4039 - mae: 0.5723 - val_loss: 0.3342 - val_mae: 0.5506
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4533 - mae: 0.6011
13/13 [==============================] - 0s 3ms/step - loss: 0.3493 - mae: 0.5262

13/13 [==============================] - 0s 18ms/step - loss: 0.3493 - mae: 0.5262 - val_loss: 0.2856 - val_mae: 0.5050
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3155 - mae: 0.4813
13/13 [==============================] - 0s 2ms/step - loss: 0.3082 - mae: 0.4853

13/13 [==============================] - 0s 17ms/step - loss: 0.3082 - mae: 0.4853 - val_loss: 0.2415 - val_mae: 0.4599
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2487 - mae: 0.4485
13/13 [==============================] - 0s 3ms/step - loss: 0.2743 - mae: 0.4569

13/13 [==============================] - 0s 18ms/step - loss: 0.2743 - mae: 0.4569 - val_loss: 0.2067 - val_mae: 0.4209
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2377 - mae: 0.4417
13/13 [==============================] - 0s 3ms/step - loss: 0.2392 - mae: 0.4201

13/13 [==============================] - 0s 26ms/step - loss: 0.2392 - mae: 0.4201 - val_loss: 0.1765 - val_mae: 0.3856
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2214 - mae: 0.3860
13/13 [==============================] - 0s 3ms/step - loss: 0.2105 - mae: 0.3864

13/13 [==============================] - 0s 20ms/step - loss: 0.2105 - mae: 0.3864 - val_loss: 0.1506 - val_mae: 0.3524
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1863 - mae: 0.3724
13/13 [==============================] - 0s 3ms/step - loss: 0.1798 - mae: 0.3543

13/13 [==============================] - 0s 18ms/step - loss: 0.1798 - mae: 0.3543 - val_loss: 0.1302 - val_mae: 0.3239
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1801 - mae: 0.3532
13/13 [==============================] - 0s 2ms/step - loss: 0.1654 - mae: 0.3346

13/13 [==============================] - 0s 18ms/step - loss: 0.1654 - mae: 0.3346 - val_loss: 0.1124 - val_mae: 0.2965
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1493 - mae: 0.3170
13/13 [==============================] - 0s 3ms/step - loss: 0.1470 - mae: 0.3192

13/13 [==============================] - 0s 18ms/step - loss: 0.1470 - mae: 0.3192 - val_loss: 0.0988 - val_mae: 0.2737
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1272 - mae: 0.3111
13/13 [==============================] - 0s 3ms/step - loss: 0.1409 - mae: 0.3083

13/13 [==============================] - 0s 18ms/step - loss: 0.1409 - mae: 0.3083 - val_loss: 0.0860 - val_mae: 0.2504
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2129 - mae: 0.3839
13/13 [==============================] - 0s 2ms/step - loss: 0.1265 - mae: 0.2879

13/13 [==============================] - 0s 17ms/step - loss: 0.1265 - mae: 0.2879 - val_loss: 0.0763 - val_mae: 0.2320
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1185 - mae: 0.2638
13/13 [==============================] - 0s 3ms/step - loss: 0.1157 - mae: 0.2713

13/13 [==============================] - 0s 18ms/step - loss: 0.1157 - mae: 0.2713 - val_loss: 0.0679 - val_mae: 0.2167
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0921 - mae: 0.2539
13/13 [==============================] - 0s 2ms/step - loss: 0.1098 - mae: 0.2694

13/13 [==============================] - 0s 18ms/step - loss: 0.1098 - mae: 0.2694 - val_loss: 0.0603 - val_mae: 0.2021
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1449 - mae: 0.3262
13/13 [==============================] - 0s 3ms/step - loss: 0.1052 - mae: 0.2667

13/13 [==============================] - 0s 19ms/step - loss: 0.1052 - mae: 0.2667 - val_loss: 0.0547 - val_mae: 0.1908
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1589 - mae: 0.3170
13/13 [==============================] - 0s 2ms/step - loss: 0.1041 - mae: 0.2573

13/13 [==============================] - 0s 18ms/step - loss: 0.1041 - mae: 0.2573 - val_loss: 0.0497 - val_mae: 0.1803
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1008 - mae: 0.2671
13/13 [==============================] - 0s 2ms/step - loss: 0.0860 - mae: 0.2360

13/13 [==============================] - 0s 17ms/step - loss: 0.0860 - mae: 0.2360 - val_loss: 0.0454 - val_mae: 0.1717
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1086 - mae: 0.2616
13/13 [==============================] - 0s 3ms/step - loss: 0.0949 - mae: 0.2421

13/13 [==============================] - 0s 18ms/step - loss: 0.0949 - mae: 0.2421 - val_loss: 0.0417 - val_mae: 0.1643
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0802 - mae: 0.2438
13/13 [==============================] - 0s 2ms/step - loss: 0.0999 - mae: 0.2500

13/13 [==============================] - 0s 17ms/step - loss: 0.0999 - mae: 0.2500 - val_loss: 0.0385 - val_mae: 0.1578
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0877 - mae: 0.2439
13/13 [==============================] - 0s 3ms/step - loss: 0.0718 - mae: 0.2136

13/13 [==============================] - 0s 18ms/step - loss: 0.0718 - mae: 0.2136 - val_loss: 0.0366 - val_mae: 0.1538
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0986 - mae: 0.2496
13/13 [==============================] - 0s 3ms/step - loss: 0.0787 - mae: 0.2213

13/13 [==============================] - 0s 18ms/step - loss: 0.0787 - mae: 0.2213 - val_loss: 0.0350 - val_mae: 0.1501
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0587 - mae: 0.1873
13/13 [==============================] - 0s 2ms/step - loss: 0.0812 - mae: 0.2316

13/13 [==============================] - 0s 16ms/step - loss: 0.0812 - mae: 0.2316 - val_loss: 0.0328 - val_mae: 0.1447
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0957 - mae: 0.2558
13/13 [==============================] - 0s 3ms/step - loss: 0.0784 - mae: 0.2250

13/13 [==============================] - 0s 18ms/step - loss: 0.0784 - mae: 0.2250 - val_loss: 0.0312 - val_mae: 0.1412
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0958 - mae: 0.2443
13/13 [==============================] - 0s 2ms/step - loss: 0.0803 - mae: 0.2217

13/13 [==============================] - 0s 18ms/step - loss: 0.0803 - mae: 0.2217 - val_loss: 0.0301 - val_mae: 0.1385
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1090 - mae: 0.2654
13/13 [==============================] - 0s 2ms/step - loss: 0.0779 - mae: 0.2214

13/13 [==============================] - 0s 18ms/step - loss: 0.0779 - mae: 0.2214 - val_loss: 0.0294 - val_mae: 0.1368
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0640 - mae: 0.1933
13/13 [==============================] - 0s 3ms/step - loss: 0.0765 - mae: 0.2195

13/13 [==============================] - 0s 18ms/step - loss: 0.0765 - mae: 0.2195 - val_loss: 0.0283 - val_mae: 0.1342
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0546 - mae: 0.1810
13/13 [==============================] - 0s 2ms/step - loss: 0.0727 - mae: 0.2116

13/13 [==============================] - 0s 18ms/step - loss: 0.0727 - mae: 0.2116 - val_loss: 0.0276 - val_mae: 0.1325
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0699 - mae: 0.2229
13/13 [==============================] - 0s 3ms/step - loss: 0.0785 - mae: 0.2250

13/13 [==============================] - 0s 18ms/step - loss: 0.0785 - mae: 0.2250 - val_loss: 0.0265 - val_mae: 0.1295
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0660 - mae: 0.2180
13/13 [==============================] - 0s 2ms/step - loss: 0.0698 - mae: 0.2113

13/13 [==============================] - 0s 18ms/step - loss: 0.0698 - mae: 0.2113 - val_loss: 0.0256 - val_mae: 0.1274
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0912 - mae: 0.2421
13/13 [==============================] - 0s 2ms/step - loss: 0.0691 - mae: 0.2171

13/13 [==============================] - 0s 18ms/step - loss: 0.0691 - mae: 0.2171 - val_loss: 0.0248 - val_mae: 0.1257
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0891 - mae: 0.2529
13/13 [==============================] - 0s 3ms/step - loss: 0.0736 - mae: 0.2200

13/13 [==============================] - 0s 18ms/step - loss: 0.0736 - mae: 0.2200 - val_loss: 0.0244 - val_mae: 0.1247
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0676 - mae: 0.1979
13/13 [==============================] - 0s 2ms/step - loss: 0.0745 - mae: 0.2225

13/13 [==============================] - 0s 18ms/step - loss: 0.0745 - mae: 0.2225 - val_loss: 0.0240 - val_mae: 0.1236
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0861 - mae: 0.2176
13/13 [==============================] - 0s 2ms/step - loss: 0.0731 - mae: 0.2183

13/13 [==============================] - 0s 18ms/step - loss: 0.0731 - mae: 0.2183 - val_loss: 0.0234 - val_mae: 0.1222
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0691 - mae: 0.2231
13/13 [==============================] - 0s 2ms/step - loss: 0.0605 - mae: 0.1978

13/13 [==============================] - 0s 17ms/step - loss: 0.0605 - mae: 0.1978 - val_loss: 0.0230 - val_mae: 0.1212
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0665 - mae: 0.2070
13/13 [==============================] - 0s 3ms/step - loss: 0.0698 - mae: 0.2140

13/13 [==============================] - 0s 18ms/step - loss: 0.0698 - mae: 0.2140 - val_loss: 0.0227 - val_mae: 0.1204
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0874 - mae: 0.2429
13/13 [==============================] - 0s 3ms/step - loss: 0.0716 - mae: 0.2097

13/13 [==============================] - 0s 17ms/step - loss: 0.0716 - mae: 0.2097 - val_loss: 0.0224 - val_mae: 0.1195
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0687 - mae: 0.2079
13/13 [==============================] - 0s 2ms/step - loss: 0.0684 - mae: 0.2090

13/13 [==============================] - 0s 18ms/step - loss: 0.0684 - mae: 0.2090 - val_loss: 0.0222 - val_mae: 0.1190
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0790 - mae: 0.2266
13/13 [==============================] - 0s 2ms/step - loss: 0.0655 - mae: 0.2063

13/13 [==============================] - 0s 17ms/step - loss: 0.0655 - mae: 0.2063 - val_loss: 0.0217 - val_mae: 0.1178
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0775 - mae: 0.2181
13/13 [==============================] - 0s 2ms/step - loss: 0.0643 - mae: 0.2019

13/13 [==============================] - 0s 18ms/step - loss: 0.0643 - mae: 0.2019 - val_loss: 0.0213 - val_mae: 0.1167
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0679 - mae: 0.2044
13/13 [==============================] - 0s 2ms/step - loss: 0.0667 - mae: 0.2088

13/13 [==============================] - 0s 18ms/step - loss: 0.0667 - mae: 0.2088 - val_loss: 0.0211 - val_mae: 0.1161
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0418 - mae: 0.1762
13/13 [==============================] - 0s 2ms/step - loss: 0.0656 - mae: 0.2075

13/13 [==============================] - 0s 18ms/step - loss: 0.0656 - mae: 0.2075 - val_loss: 0.0209 - val_mae: 0.1157
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0353 - mae: 0.1487
13/13 [==============================] - 0s 3ms/step - loss: 0.0747 - mae: 0.2179

13/13 [==============================] - 0s 18ms/step - loss: 0.0747 - mae: 0.2179 - val_loss: 0.0207 - val_mae: 0.1152
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0657 - mae: 0.1938
13/13 [==============================] - 0s 3ms/step - loss: 0.0650 - mae: 0.2015

13/13 [==============================] - 0s 18ms/step - loss: 0.0650 - mae: 0.2015 - val_loss: 0.0205 - val_mae: 0.1147
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0485 - mae: 0.1829
13/13 [==============================] - 0s 2ms/step - loss: 0.0574 - mae: 0.1928

13/13 [==============================] - 0s 18ms/step - loss: 0.0574 - mae: 0.1928 - val_loss: 0.0205 - val_mae: 0.1145

Run completed: runs/2022-12-07T02-33-36Z

Training run 9/52 (flags = list(32, 32, 0.01, 50, 50, "sigmoid", "tanh", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-34-01Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 3s - loss: 1.0651 - mae: 0.8575
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0016s vs `on_train_batch_end` time: 0.0037s). Check your callbacks.

8/8 [==============================] - 1s 3ms/step - loss: 0.7713 - mae: 0.7099

8/8 [==============================] - 1s 115ms/step - loss: 0.7713 - mae: 0.7099 - val_loss: 0.0235 - val_mae: 0.1301
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4180 - mae: 0.4920
8/8 [==============================] - 0s 3ms/step - loss: 0.4030 - mae: 0.5038

8/8 [==============================] - 0s 28ms/step - loss: 0.4030 - mae: 0.5038 - val_loss: 0.0508 - val_mae: 0.1805
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4444 - mae: 0.5259
8/8 [==============================] - 0s 3ms/step - loss: 0.4182 - mae: 0.5264

8/8 [==============================] - 0s 33ms/step - loss: 0.4182 - mae: 0.5264 - val_loss: 0.0271 - val_mae: 0.1179
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3076 - mae: 0.4544
8/8 [==============================] - 0s 3ms/step - loss: 0.3566 - mae: 0.4778

8/8 [==============================] - 0s 35ms/step - loss: 0.3566 - mae: 0.4778 - val_loss: 0.0164 - val_mae: 0.0974
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3334 - mae: 0.4752
8/8 [==============================] - 0s 2ms/step - loss: 0.3532 - mae: 0.4889

8/8 [==============================] - 0s 32ms/step - loss: 0.3532 - mae: 0.4889 - val_loss: 0.0142 - val_mae: 0.0913
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3167 - mae: 0.4422
8/8 [==============================] - 0s 4ms/step - loss: 0.2710 - mae: 0.4245

8/8 [==============================] - 0s 44ms/step - loss: 0.2710 - mae: 0.4245 - val_loss: 0.0182 - val_mae: 0.0950
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2794 - mae: 0.4017
8/8 [==============================] - 0s 3ms/step - loss: 0.2628 - mae: 0.4123

8/8 [==============================] - 0s 33ms/step - loss: 0.2628 - mae: 0.4123 - val_loss: 0.0123 - val_mae: 0.0834
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1604 - mae: 0.3171
8/8 [==============================] - 0s 3ms/step - loss: 0.2017 - mae: 0.3620

8/8 [==============================] - 0s 32ms/step - loss: 0.2017 - mae: 0.3620 - val_loss: 0.0285 - val_mae: 0.1327
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1950 - mae: 0.3668
8/8 [==============================] - 0s 3ms/step - loss: 0.2053 - mae: 0.3699

8/8 [==============================] - 0s 30ms/step - loss: 0.2053 - mae: 0.3699 - val_loss: 0.0157 - val_mae: 0.0897
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2966 - mae: 0.4472
8/8 [==============================] - 0s 3ms/step - loss: 0.1695 - mae: 0.3230

8/8 [==============================] - 0s 38ms/step - loss: 0.1695 - mae: 0.3230 - val_loss: 0.0111 - val_mae: 0.0809
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2708 - mae: 0.4378
8/8 [==============================] - 0s 3ms/step - loss: 0.1947 - mae: 0.3581

8/8 [==============================] - 0s 34ms/step - loss: 0.1947 - mae: 0.3581 - val_loss: 0.0119 - val_mae: 0.0798
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1800 - mae: 0.3506
8/8 [==============================] - 0s 2ms/step - loss: 0.1722 - mae: 0.3307

8/8 [==============================] - 0s 28ms/step - loss: 0.1722 - mae: 0.3307 - val_loss: 0.0162 - val_mae: 0.0923
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1207 - mae: 0.2671
8/8 [==============================] - 0s 3ms/step - loss: 0.1475 - mae: 0.3003

8/8 [==============================] - 0s 35ms/step - loss: 0.1475 - mae: 0.3003 - val_loss: 0.0253 - val_mae: 0.1246
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1862 - mae: 0.3656
8/8 [==============================] - 0s 3ms/step - loss: 0.1354 - mae: 0.2947

8/8 [==============================] - 0s 34ms/step - loss: 0.1354 - mae: 0.2947 - val_loss: 0.0212 - val_mae: 0.1102
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2110 - mae: 0.3819
8/8 [==============================] - 0s 3ms/step - loss: 0.1589 - mae: 0.3260

8/8 [==============================] - 0s 29ms/step - loss: 0.1589 - mae: 0.3260 - val_loss: 0.0225 - val_mae: 0.1163
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0983 - mae: 0.2565
8/8 [==============================] - 0s 2ms/step - loss: 0.1133 - mae: 0.2696

8/8 [==============================] - 0s 33ms/step - loss: 0.1133 - mae: 0.2696 - val_loss: 0.0165 - val_mae: 0.0956
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1368 - mae: 0.2999
8/8 [==============================] - 0s 3ms/step - loss: 0.1270 - mae: 0.2845

8/8 [==============================] - 0s 37ms/step - loss: 0.1270 - mae: 0.2845 - val_loss: 0.0250 - val_mae: 0.1262
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1718 - mae: 0.3200
8/8 [==============================] - 0s 3ms/step - loss: 0.1060 - mae: 0.2546

8/8 [==============================] - 0s 31ms/step - loss: 0.1060 - mae: 0.2546 - val_loss: 0.0146 - val_mae: 0.0893
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0934 - mae: 0.2473
8/8 [==============================] - 0s 2ms/step - loss: 0.1144 - mae: 0.2722

8/8 [==============================] - 0s 29ms/step - loss: 0.1144 - mae: 0.2722 - val_loss: 0.0148 - val_mae: 0.0902
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1751 - mae: 0.3493
8/8 [==============================] - 0s 3ms/step - loss: 0.1354 - mae: 0.2928

8/8 [==============================] - 0s 33ms/step - loss: 0.1354 - mae: 0.2928 - val_loss: 0.0223 - val_mae: 0.1171
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0676 - mae: 0.2163
8/8 [==============================] - 0s 3ms/step - loss: 0.0964 - mae: 0.2566

8/8 [==============================] - 0s 33ms/step - loss: 0.0964 - mae: 0.2566 - val_loss: 0.0158 - val_mae: 0.0949
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1183 - mae: 0.2774
8/8 [==============================] - 0s 3ms/step - loss: 0.0964 - mae: 0.2477

8/8 [==============================] - 0s 30ms/step - loss: 0.0964 - mae: 0.2477 - val_loss: 0.0211 - val_mae: 0.1137
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0580 - mae: 0.2027
8/8 [==============================] - 0s 3ms/step - loss: 0.1016 - mae: 0.2524

8/8 [==============================] - 0s 36ms/step - loss: 0.1016 - mae: 0.2524 - val_loss: 0.0192 - val_mae: 0.1079
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0703 - mae: 0.1923
8/8 [==============================] - 0s 2ms/step - loss: 0.0830 - mae: 0.2238

8/8 [==============================] - 0s 32ms/step - loss: 0.0830 - mae: 0.2238 - val_loss: 0.0206 - val_mae: 0.1126
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0957 - mae: 0.2520
8/8 [==============================] - 0s 3ms/step - loss: 0.0823 - mae: 0.2320

8/8 [==============================] - 0s 32ms/step - loss: 0.0823 - mae: 0.2320 - val_loss: 0.0153 - val_mae: 0.0949
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0901 - mae: 0.2469
8/8 [==============================] - 0s 3ms/step - loss: 0.0943 - mae: 0.2440

8/8 [==============================] - 0s 35ms/step - loss: 0.0943 - mae: 0.2440 - val_loss: 0.0146 - val_mae: 0.0927
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1062 - mae: 0.2517
8/8 [==============================] - 0s 2ms/step - loss: 0.0892 - mae: 0.2407

8/8 [==============================] - 0s 34ms/step - loss: 0.0892 - mae: 0.2407 - val_loss: 0.0136 - val_mae: 0.0888
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1088 - mae: 0.2635
8/8 [==============================] - 0s 3ms/step - loss: 0.0869 - mae: 0.2337

8/8 [==============================] - 0s 37ms/step - loss: 0.0869 - mae: 0.2337 - val_loss: 0.0200 - val_mae: 0.1113
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0886 - mae: 0.2392
8/8 [==============================] - 0s 3ms/step - loss: 0.0842 - mae: 0.2395

8/8 [==============================] - 0s 29ms/step - loss: 0.0842 - mae: 0.2395 - val_loss: 0.0177 - val_mae: 0.1038
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0907 - mae: 0.2402
8/8 [==============================] - 0s 3ms/step - loss: 0.0913 - mae: 0.2401

8/8 [==============================] - 0s 37ms/step - loss: 0.0913 - mae: 0.2401 - val_loss: 0.0164 - val_mae: 0.0989
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0706 - mae: 0.2197
8/8 [==============================] - 0s 3ms/step - loss: 0.0717 - mae: 0.2132

8/8 [==============================] - 0s 31ms/step - loss: 0.0717 - mae: 0.2132 - val_loss: 0.0207 - val_mae: 0.1143
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0612 - mae: 0.2108
8/8 [==============================] - 0s 3ms/step - loss: 0.0709 - mae: 0.2184

8/8 [==============================] - 0s 38ms/step - loss: 0.0709 - mae: 0.2184 - val_loss: 0.0204 - val_mae: 0.1132
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0936 - mae: 0.2607
8/8 [==============================] - 0s 3ms/step - loss: 0.0702 - mae: 0.2142

8/8 [==============================] - 0s 35ms/step - loss: 0.0702 - mae: 0.2142 - val_loss: 0.0198 - val_mae: 0.1116
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0601 - mae: 0.1982
8/8 [==============================] - 0s 3ms/step - loss: 0.0656 - mae: 0.2059

8/8 [==============================] - 0s 29ms/step - loss: 0.0656 - mae: 0.2059 - val_loss: 0.0177 - val_mae: 0.1043
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0403 - mae: 0.1595
8/8 [==============================] - 0s 3ms/step - loss: 0.0692 - mae: 0.2068

8/8 [==============================] - 0s 33ms/step - loss: 0.0692 - mae: 0.2068 - val_loss: 0.0159 - val_mae: 0.0979
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0517 - mae: 0.1802
8/8 [==============================] - 0s 3ms/step - loss: 0.0739 - mae: 0.2160

8/8 [==============================] - 0s 35ms/step - loss: 0.0739 - mae: 0.2160 - val_loss: 0.0143 - val_mae: 0.0928
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0769 - mae: 0.2201
8/8 [==============================] - 0s 3ms/step - loss: 0.0685 - mae: 0.2067

8/8 [==============================] - 0s 31ms/step - loss: 0.0685 - mae: 0.2067 - val_loss: 0.0195 - val_mae: 0.1105
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0558 - mae: 0.1820
8/8 [==============================] - 0s 3ms/step - loss: 0.0588 - mae: 0.1907

8/8 [==============================] - 0s 36ms/step - loss: 0.0588 - mae: 0.1907 - val_loss: 0.0162 - val_mae: 0.0993
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0596 - mae: 0.2055
8/8 [==============================] - 0s 3ms/step - loss: 0.0637 - mae: 0.2058

8/8 [==============================] - 0s 33ms/step - loss: 0.0637 - mae: 0.2058 - val_loss: 0.0166 - val_mae: 0.1006
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0805 - mae: 0.2277
8/8 [==============================] - 0s 3ms/step - loss: 0.0579 - mae: 0.1932

8/8 [==============================] - 0s 31ms/step - loss: 0.0579 - mae: 0.1932 - val_loss: 0.0158 - val_mae: 0.0982
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0459 - mae: 0.1750
8/8 [==============================] - 0s 3ms/step - loss: 0.0579 - mae: 0.1905

8/8 [==============================] - 0s 32ms/step - loss: 0.0579 - mae: 0.1905 - val_loss: 0.0205 - val_mae: 0.1142
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0717 - mae: 0.2159
8/8 [==============================] - 0s 3ms/step - loss: 0.0617 - mae: 0.2026

8/8 [==============================] - 0s 35ms/step - loss: 0.0617 - mae: 0.2026 - val_loss: 0.0188 - val_mae: 0.1088
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0549 - mae: 0.1787
8/8 [==============================] - 0s 3ms/step - loss: 0.0554 - mae: 0.1812

8/8 [==============================] - 0s 42ms/step - loss: 0.0554 - mae: 0.1812 - val_loss: 0.0176 - val_mae: 0.1044
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0522 - mae: 0.1852
8/8 [==============================] - 0s 4ms/step - loss: 0.0555 - mae: 0.1892

8/8 [==============================] - 0s 38ms/step - loss: 0.0555 - mae: 0.1892 - val_loss: 0.0132 - val_mae: 0.0896
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0404 - mae: 0.1628
8/8 [==============================] - 0s 3ms/step - loss: 0.0502 - mae: 0.1824

8/8 [==============================] - 0s 29ms/step - loss: 0.0502 - mae: 0.1824 - val_loss: 0.0157 - val_mae: 0.0981
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0411 - mae: 0.1680
8/8 [==============================] - 0s 3ms/step - loss: 0.0532 - mae: 0.1858

8/8 [==============================] - 0s 42ms/step - loss: 0.0532 - mae: 0.1858 - val_loss: 0.0182 - val_mae: 0.1066
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0492 - mae: 0.1762
8/8 [==============================] - 0s 3ms/step - loss: 0.0484 - mae: 0.1738

8/8 [==============================] - 0s 33ms/step - loss: 0.0484 - mae: 0.1738 - val_loss: 0.0168 - val_mae: 0.1017
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0545 - mae: 0.1848
8/8 [==============================] - 0s 2ms/step - loss: 0.0488 - mae: 0.1746

8/8 [==============================] - 0s 32ms/step - loss: 0.0488 - mae: 0.1746 - val_loss: 0.0158 - val_mae: 0.0983
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0637 - mae: 0.2041
8/8 [==============================] - 0s 4ms/step - loss: 0.0539 - mae: 0.1826

8/8 [==============================] - 0s 45ms/step - loss: 0.0539 - mae: 0.1826 - val_loss: 0.0194 - val_mae: 0.1106
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0491 - mae: 0.1828
8/8 [==============================] - 0s 3ms/step - loss: 0.0534 - mae: 0.1870

8/8 [==============================] - 0s 28ms/step - loss: 0.0534 - mae: 0.1870 - val_loss: 0.0156 - val_mae: 0.0976

Run completed: runs/2022-12-07T02-34-01Z

Training run 10/52 (flags = list(64, 50, 0.01, 50, 30, "tanh", "tanh", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-34-27Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 3s - loss: 0.4429 - mae: 0.5933
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0013s vs `on_train_batch_end` time: 0.0039s). Check your callbacks.

8/8 [==============================] - 1s 2ms/step - loss: 0.3781 - mae: 0.5031

8/8 [==============================] - 1s 107ms/step - loss: 0.3781 - mae: 0.5031 - val_loss: 0.1289 - val_mae: 0.2934
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1652 - mae: 0.3063
8/8 [==============================] - 0s 2ms/step - loss: 0.1629 - mae: 0.3178

8/8 [==============================] - 0s 32ms/step - loss: 0.1629 - mae: 0.3178 - val_loss: 0.0885 - val_mae: 0.2402
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1988 - mae: 0.3311
8/8 [==============================] - 0s 5ms/step - loss: 0.1324 - mae: 0.2890

8/8 [==============================] - 0s 55ms/step - loss: 0.1324 - mae: 0.2890 - val_loss: 0.0418 - val_mae: 0.1630
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1174 - mae: 0.2739
8/8 [==============================] - 0s 3ms/step - loss: 0.1217 - mae: 0.2819

8/8 [==============================] - 0s 30ms/step - loss: 0.1217 - mae: 0.2819 - val_loss: 0.0329 - val_mae: 0.1460
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1148 - mae: 0.2534
8/8 [==============================] - 0s 3ms/step - loss: 0.1345 - mae: 0.2930

8/8 [==============================] - 0s 29ms/step - loss: 0.1345 - mae: 0.2930 - val_loss: 0.0265 - val_mae: 0.1304
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0840 - mae: 0.2377
8/8 [==============================] - 0s 3ms/step - loss: 0.1053 - mae: 0.2534

8/8 [==============================] - 0s 33ms/step - loss: 0.1053 - mae: 0.2534 - val_loss: 0.0229 - val_mae: 0.1200
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1116 - mae: 0.2506
8/8 [==============================] - 0s 5ms/step - loss: 0.1099 - mae: 0.2644

8/8 [==============================] - 0s 58ms/step - loss: 0.1099 - mae: 0.2644 - val_loss: 0.0218 - val_mae: 0.1173
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1256 - mae: 0.2665
8/8 [==============================] - 0s 3ms/step - loss: 0.1073 - mae: 0.2597

8/8 [==============================] - 0s 27ms/step - loss: 0.1073 - mae: 0.2597 - val_loss: 0.0190 - val_mae: 0.1072
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0749 - mae: 0.2288
8/8 [==============================] - 0s 3ms/step - loss: 0.0899 - mae: 0.2386

8/8 [==============================] - 0s 31ms/step - loss: 0.0899 - mae: 0.2386 - val_loss: 0.0177 - val_mae: 0.1008
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0859 - mae: 0.2592
8/8 [==============================] - 0s 3ms/step - loss: 0.0941 - mae: 0.2507

8/8 [==============================] - 0s 29ms/step - loss: 0.0941 - mae: 0.2507 - val_loss: 0.0129 - val_mae: 0.0851
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0846 - mae: 0.2446
8/8 [==============================] - 0s 3ms/step - loss: 0.0849 - mae: 0.2307

8/8 [==============================] - 0s 29ms/step - loss: 0.0849 - mae: 0.2307 - val_loss: 0.0121 - val_mae: 0.0849
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0790 - mae: 0.2202
8/8 [==============================] - 0s 3ms/step - loss: 0.0913 - mae: 0.2351

8/8 [==============================] - 0s 29ms/step - loss: 0.0913 - mae: 0.2351 - val_loss: 0.0109 - val_mae: 0.0805
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0787 - mae: 0.2313
8/8 [==============================] - 0s 3ms/step - loss: 0.0780 - mae: 0.2191

8/8 [==============================] - 0s 29ms/step - loss: 0.0780 - mae: 0.2191 - val_loss: 0.0109 - val_mae: 0.0812
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0733 - mae: 0.2112
8/8 [==============================] - 0s 3ms/step - loss: 0.0820 - mae: 0.2240

8/8 [==============================] - 0s 29ms/step - loss: 0.0820 - mae: 0.2240 - val_loss: 0.0103 - val_mae: 0.0774
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0863 - mae: 0.2289
8/8 [==============================] - 0s 3ms/step - loss: 0.0767 - mae: 0.2143

8/8 [==============================] - 0s 29ms/step - loss: 0.0767 - mae: 0.2143 - val_loss: 0.0095 - val_mae: 0.0742
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0921 - mae: 0.2386
8/8 [==============================] - 0s 3ms/step - loss: 0.0720 - mae: 0.2091

8/8 [==============================] - 0s 29ms/step - loss: 0.0720 - mae: 0.2091 - val_loss: 0.0089 - val_mae: 0.0719
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0976 - mae: 0.2550
8/8 [==============================] - 0s 3ms/step - loss: 0.0681 - mae: 0.2073

8/8 [==============================] - 0s 31ms/step - loss: 0.0681 - mae: 0.2073 - val_loss: 0.0084 - val_mae: 0.0658
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0598 - mae: 0.1956
8/8 [==============================] - 0s 3ms/step - loss: 0.0714 - mae: 0.2071

8/8 [==============================] - 0s 30ms/step - loss: 0.0714 - mae: 0.2071 - val_loss: 0.0084 - val_mae: 0.0695
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0807 - mae: 0.2449
8/8 [==============================] - 0s 3ms/step - loss: 0.0719 - mae: 0.2162

8/8 [==============================] - 0s 28ms/step - loss: 0.0719 - mae: 0.2162 - val_loss: 0.0079 - val_mae: 0.0674
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0514 - mae: 0.1894
8/8 [==============================] - 0s 3ms/step - loss: 0.0744 - mae: 0.2117

8/8 [==============================] - 0s 33ms/step - loss: 0.0744 - mae: 0.2117 - val_loss: 0.0083 - val_mae: 0.0666
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0692 - mae: 0.2161
8/8 [==============================] - 0s 3ms/step - loss: 0.0671 - mae: 0.2059

8/8 [==============================] - 0s 33ms/step - loss: 0.0671 - mae: 0.2059 - val_loss: 0.0087 - val_mae: 0.0664
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0661 - mae: 0.2203
8/8 [==============================] - 0s 3ms/step - loss: 0.0666 - mae: 0.2023

8/8 [==============================] - 0s 29ms/step - loss: 0.0666 - mae: 0.2023 - val_loss: 0.0091 - val_mae: 0.0671
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1182 - mae: 0.2529
8/8 [==============================] - 0s 3ms/step - loss: 0.0688 - mae: 0.2066

8/8 [==============================] - 0s 45ms/step - loss: 0.0688 - mae: 0.2066 - val_loss: 0.0078 - val_mae: 0.0653
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0767 - mae: 0.2140
8/8 [==============================] - 0s 5ms/step - loss: 0.0711 - mae: 0.2086

8/8 [==============================] - 0s 60ms/step - loss: 0.0711 - mae: 0.2086 - val_loss: 0.0089 - val_mae: 0.0727
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0648 - mae: 0.2027
8/8 [==============================] - 0s 3ms/step - loss: 0.0588 - mae: 0.1950

8/8 [==============================] - 0s 33ms/step - loss: 0.0588 - mae: 0.1950 - val_loss: 0.0074 - val_mae: 0.0648
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0493 - mae: 0.1829
8/8 [==============================] - 0s 4ms/step - loss: 0.0688 - mae: 0.2074

8/8 [==============================] - 0s 34ms/step - loss: 0.0688 - mae: 0.2074 - val_loss: 0.0067 - val_mae: 0.0618
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0742 - mae: 0.2151
8/8 [==============================] - 0s 5ms/step - loss: 0.0547 - mae: 0.1920

8/8 [==============================] - 0s 52ms/step - loss: 0.0547 - mae: 0.1920 - val_loss: 0.0065 - val_mae: 0.0596
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0406 - mae: 0.1612
8/8 [==============================] - 0s 3ms/step - loss: 0.0611 - mae: 0.1975

8/8 [==============================] - 0s 33ms/step - loss: 0.0611 - mae: 0.1975 - val_loss: 0.0060 - val_mae: 0.0578
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0565 - mae: 0.1895
8/8 [==============================] - 0s 3ms/step - loss: 0.0593 - mae: 0.1997

8/8 [==============================] - 0s 28ms/step - loss: 0.0593 - mae: 0.1997 - val_loss: 0.0056 - val_mae: 0.0574
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0394 - mae: 0.1476
8/8 [==============================] - 0s 2ms/step - loss: 0.0533 - mae: 0.1789

8/8 [==============================] - 0s 29ms/step - loss: 0.0533 - mae: 0.1789 - val_loss: 0.0057 - val_mae: 0.0576

Run completed: runs/2022-12-07T02-34-27Z

Training run 11/52 (flags = list(32, 32, 0.01, 30, 30, "relu", "relu", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-34-48Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 5s - loss: 0.9918 - mae: 0.8722
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0011s vs `on_train_batch_end` time: 0.0046s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.6377 - mae: 0.6388

13/13 [==============================] - 1s 66ms/step - loss: 0.6377 - mae: 0.6388 - val_loss: 0.1312 - val_mae: 0.2985
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3072 - mae: 0.4247
13/13 [==============================] - 0s 4ms/step - loss: 0.3800 - mae: 0.4946

13/13 [==============================] - 0s 23ms/step - loss: 0.3800 - mae: 0.4946 - val_loss: 0.1072 - val_mae: 0.2691
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2922 - mae: 0.4075
13/13 [==============================] - 0s 4ms/step - loss: 0.2798 - mae: 0.4272

13/13 [==============================] - 0s 26ms/step - loss: 0.2798 - mae: 0.4272 - val_loss: 0.0725 - val_mae: 0.2238
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1391 - mae: 0.2890
13/13 [==============================] - 0s 2ms/step - loss: 0.1977 - mae: 0.3599

13/13 [==============================] - 0s 17ms/step - loss: 0.1977 - mae: 0.3599 - val_loss: 0.0444 - val_mae: 0.1752
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1626 - mae: 0.3212
13/13 [==============================] - 0s 2ms/step - loss: 0.1910 - mae: 0.3476

13/13 [==============================] - 0s 19ms/step - loss: 0.1910 - mae: 0.3476 - val_loss: 0.0473 - val_mae: 0.1770
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1939 - mae: 0.3434
13/13 [==============================] - 0s 5ms/step - loss: 0.1767 - mae: 0.3383

13/13 [==============================] - 0s 32ms/step - loss: 0.1767 - mae: 0.3383 - val_loss: 0.0389 - val_mae: 0.1606
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0917 - mae: 0.2483
13/13 [==============================] - 0s 2ms/step - loss: 0.1440 - mae: 0.3033

13/13 [==============================] - 0s 19ms/step - loss: 0.1440 - mae: 0.3033 - val_loss: 0.0379 - val_mae: 0.1580
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1333 - mae: 0.3213
13/13 [==============================] - 0s 3ms/step - loss: 0.1166 - mae: 0.2776

13/13 [==============================] - 0s 18ms/step - loss: 0.1166 - mae: 0.2776 - val_loss: 0.0311 - val_mae: 0.1436
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1335 - mae: 0.2905
13/13 [==============================] - 0s 3ms/step - loss: 0.1210 - mae: 0.2752

13/13 [==============================] - 0s 18ms/step - loss: 0.1210 - mae: 0.2752 - val_loss: 0.0310 - val_mae: 0.1450
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0963 - mae: 0.2436
13/13 [==============================] - 0s 2ms/step - loss: 0.1041 - mae: 0.2579

13/13 [==============================] - 0s 18ms/step - loss: 0.1041 - mae: 0.2579 - val_loss: 0.0315 - val_mae: 0.1464
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0851 - mae: 0.2385
13/13 [==============================] - 0s 2ms/step - loss: 0.0861 - mae: 0.2342

13/13 [==============================] - 0s 18ms/step - loss: 0.0861 - mae: 0.2342 - val_loss: 0.0231 - val_mae: 0.1283
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0737 - mae: 0.2012
13/13 [==============================] - 0s 2ms/step - loss: 0.0771 - mae: 0.2231

13/13 [==============================] - 0s 18ms/step - loss: 0.0771 - mae: 0.2231 - val_loss: 0.0215 - val_mae: 0.1238
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0609 - mae: 0.1999
13/13 [==============================] - 0s 3ms/step - loss: 0.0924 - mae: 0.2376

13/13 [==============================] - 0s 19ms/step - loss: 0.0924 - mae: 0.2376 - val_loss: 0.0229 - val_mae: 0.1290
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0747 - mae: 0.1972
13/13 [==============================] - 0s 2ms/step - loss: 0.0726 - mae: 0.2144

13/13 [==============================] - 0s 19ms/step - loss: 0.0726 - mae: 0.2144 - val_loss: 0.0220 - val_mae: 0.1274
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0950 - mae: 0.2233
13/13 [==============================] - 0s 2ms/step - loss: 0.0654 - mae: 0.2044

13/13 [==============================] - 0s 18ms/step - loss: 0.0654 - mae: 0.2044 - val_loss: 0.0190 - val_mae: 0.1189
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0627 - mae: 0.1938
13/13 [==============================] - 0s 3ms/step - loss: 0.0584 - mae: 0.1889

13/13 [==============================] - 0s 18ms/step - loss: 0.0584 - mae: 0.1889 - val_loss: 0.0201 - val_mae: 0.1238
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0923 - mae: 0.2448
13/13 [==============================] - 0s 2ms/step - loss: 0.0617 - mae: 0.1944

13/13 [==============================] - 0s 18ms/step - loss: 0.0617 - mae: 0.1944 - val_loss: 0.0171 - val_mae: 0.1140
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0615 - mae: 0.1923
13/13 [==============================] - 0s 2ms/step - loss: 0.0520 - mae: 0.1754

13/13 [==============================] - 0s 18ms/step - loss: 0.0520 - mae: 0.1754 - val_loss: 0.0154 - val_mae: 0.1084
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0554 - mae: 0.1918
13/13 [==============================] - 0s 3ms/step - loss: 0.0395 - mae: 0.1609

13/13 [==============================] - 0s 17ms/step - loss: 0.0395 - mae: 0.1609 - val_loss: 0.0148 - val_mae: 0.1061
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0620 - mae: 0.1948
13/13 [==============================] - 0s 2ms/step - loss: 0.0472 - mae: 0.1705

13/13 [==============================] - 0s 18ms/step - loss: 0.0472 - mae: 0.1705 - val_loss: 0.0159 - val_mae: 0.1109
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0808 - mae: 0.2261
13/13 [==============================] - 0s 2ms/step - loss: 0.0446 - mae: 0.1716

13/13 [==============================] - 0s 18ms/step - loss: 0.0446 - mae: 0.1716 - val_loss: 0.0144 - val_mae: 0.1052
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0328 - mae: 0.1508
13/13 [==============================] - 0s 3ms/step - loss: 0.0418 - mae: 0.1616

13/13 [==============================] - 0s 18ms/step - loss: 0.0418 - mae: 0.1616 - val_loss: 0.0138 - val_mae: 0.1029
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0633 - mae: 0.1942
13/13 [==============================] - 0s 2ms/step - loss: 0.0407 - mae: 0.1610

13/13 [==============================] - 0s 18ms/step - loss: 0.0407 - mae: 0.1610 - val_loss: 0.0133 - val_mae: 0.1012
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0436 - mae: 0.1704
13/13 [==============================] - 0s 3ms/step - loss: 0.0356 - mae: 0.1485

13/13 [==============================] - 0s 18ms/step - loss: 0.0356 - mae: 0.1485 - val_loss: 0.0158 - val_mae: 0.1109
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0521 - mae: 0.1706
13/13 [==============================] - 0s 3ms/step - loss: 0.0448 - mae: 0.1653

13/13 [==============================] - 0s 18ms/step - loss: 0.0448 - mae: 0.1653 - val_loss: 0.0147 - val_mae: 0.1068
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0498 - mae: 0.1834
13/13 [==============================] - 0s 3ms/step - loss: 0.0399 - mae: 0.1571

13/13 [==============================] - 0s 18ms/step - loss: 0.0399 - mae: 0.1571 - val_loss: 0.0135 - val_mae: 0.1024
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0356 - mae: 0.1580
13/13 [==============================] - 0s 3ms/step - loss: 0.0376 - mae: 0.1485

13/13 [==============================] - 0s 18ms/step - loss: 0.0376 - mae: 0.1485 - val_loss: 0.0131 - val_mae: 0.1010
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0218 - mae: 0.1198
13/13 [==============================] - 0s 3ms/step - loss: 0.0342 - mae: 0.1426

13/13 [==============================] - 0s 18ms/step - loss: 0.0342 - mae: 0.1426 - val_loss: 0.0126 - val_mae: 0.0992
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0292 - mae: 0.1425
13/13 [==============================] - 0s 2ms/step - loss: 0.0346 - mae: 0.1454

13/13 [==============================] - 0s 20ms/step - loss: 0.0346 - mae: 0.1454 - val_loss: 0.0127 - val_mae: 0.0994
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0382 - mae: 0.1705
13/13 [==============================] - 0s 3ms/step - loss: 0.0332 - mae: 0.1340

13/13 [==============================] - 0s 18ms/step - loss: 0.0332 - mae: 0.1340 - val_loss: 0.0119 - val_mae: 0.0965

Run completed: runs/2022-12-07T02-34-48Z

Training run 12/52 (flags = list(64, 50, 0.001, 50, 30, "relu", "relu", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-35-08Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 3s - loss: 2.9024 - mae: 1.3704
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0015s vs `on_train_batch_end` time: 0.0031s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 1.6705 - mae: 1.0760

8/8 [==============================] - 1s 110ms/step - loss: 1.6705 - mae: 1.0760 - val_loss: 0.7476 - val_mae: 0.8348
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 1.4755 - mae: 1.0147
8/8 [==============================] - 0s 3ms/step - loss: 1.4371 - mae: 0.9883

8/8 [==============================] - 0s 31ms/step - loss: 1.4371 - mae: 0.9883 - val_loss: 0.6157 - val_mae: 0.7545
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 1.2628 - mae: 0.9329
8/8 [==============================] - 0s 5ms/step - loss: 1.0913 - mae: 0.8598

8/8 [==============================] - 0s 63ms/step - loss: 1.0913 - mae: 0.8598 - val_loss: 0.5226 - val_mae: 0.6916
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 1.3630 - mae: 0.9537
8/8 [==============================] - 0s 3ms/step - loss: 1.1369 - mae: 0.8532

8/8 [==============================] - 0s 31ms/step - loss: 1.1369 - mae: 0.8532 - val_loss: 0.4415 - val_mae: 0.6322
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5715 - mae: 0.6511
8/8 [==============================] - 0s 3ms/step - loss: 1.0178 - mae: 0.8262

8/8 [==============================] - 0s 31ms/step - loss: 1.0178 - mae: 0.8262 - val_loss: 0.3709 - val_mae: 0.5766
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.9296 - mae: 0.7765
8/8 [==============================] - 0s 5ms/step - loss: 1.0373 - mae: 0.8220

8/8 [==============================] - 0s 53ms/step - loss: 1.0373 - mae: 0.8220 - val_loss: 0.3209 - val_mae: 0.5334
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7593 - mae: 0.7429
8/8 [==============================] - 0s 5ms/step - loss: 0.8079 - mae: 0.7211

8/8 [==============================] - 0s 47ms/step - loss: 0.8079 - mae: 0.7211 - val_loss: 0.2857 - val_mae: 0.5014
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7191 - mae: 0.6722
8/8 [==============================] - 0s 3ms/step - loss: 0.6930 - mae: 0.6699

8/8 [==============================] - 0s 28ms/step - loss: 0.6930 - mae: 0.6699 - val_loss: 0.2624 - val_mae: 0.4783
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5886 - mae: 0.6446
8/8 [==============================] - 0s 3ms/step - loss: 0.7423 - mae: 0.6799

8/8 [==============================] - 0s 31ms/step - loss: 0.7423 - mae: 0.6799 - val_loss: 0.2325 - val_mae: 0.4463
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.8371 - mae: 0.7106
8/8 [==============================] - 0s 3ms/step - loss: 0.6771 - mae: 0.6566

8/8 [==============================] - 0s 29ms/step - loss: 0.6771 - mae: 0.6566 - val_loss: 0.2110 - val_mae: 0.4233
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.9221 - mae: 0.7199
8/8 [==============================] - 0s 3ms/step - loss: 0.7860 - mae: 0.7031

8/8 [==============================] - 0s 31ms/step - loss: 0.7860 - mae: 0.7031 - val_loss: 0.1936 - val_mae: 0.4029
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6836 - mae: 0.6753
8/8 [==============================] - 0s 3ms/step - loss: 0.6784 - mae: 0.6651

8/8 [==============================] - 0s 31ms/step - loss: 0.6784 - mae: 0.6651 - val_loss: 0.1788 - val_mae: 0.3864
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6380 - mae: 0.6389
8/8 [==============================] - 0s 3ms/step - loss: 0.7063 - mae: 0.6620

8/8 [==============================] - 0s 29ms/step - loss: 0.7063 - mae: 0.6620 - val_loss: 0.1675 - val_mae: 0.3730
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4733 - mae: 0.5396
8/8 [==============================] - 0s 3ms/step - loss: 0.5319 - mae: 0.5872

8/8 [==============================] - 0s 33ms/step - loss: 0.5319 - mae: 0.5872 - val_loss: 0.1597 - val_mae: 0.3630
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6572 - mae: 0.6189
8/8 [==============================] - 0s 3ms/step - loss: 0.6765 - mae: 0.6521

8/8 [==============================] - 0s 31ms/step - loss: 0.6765 - mae: 0.6521 - val_loss: 0.1453 - val_mae: 0.3434
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4115 - mae: 0.5148
8/8 [==============================] - 0s 3ms/step - loss: 0.5770 - mae: 0.6217

8/8 [==============================] - 0s 31ms/step - loss: 0.5770 - mae: 0.6217 - val_loss: 0.1335 - val_mae: 0.3264
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6294 - mae: 0.6428
8/8 [==============================] - 0s 3ms/step - loss: 0.5879 - mae: 0.6193

8/8 [==============================] - 0s 29ms/step - loss: 0.5879 - mae: 0.6193 - val_loss: 0.1245 - val_mae: 0.3134
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6450 - mae: 0.6308
8/8 [==============================] - 0s 3ms/step - loss: 0.5732 - mae: 0.5889

8/8 [==============================] - 0s 31ms/step - loss: 0.5732 - mae: 0.5889 - val_loss: 0.1217 - val_mae: 0.3108
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4074 - mae: 0.5285
8/8 [==============================] - 0s 3ms/step - loss: 0.5090 - mae: 0.5657

8/8 [==============================] - 0s 31ms/step - loss: 0.5090 - mae: 0.5657 - val_loss: 0.1165 - val_mae: 0.3039
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 1.0073 - mae: 0.7102
8/8 [==============================] - 0s 3ms/step - loss: 0.6007 - mae: 0.5944

8/8 [==============================] - 0s 34ms/step - loss: 0.6007 - mae: 0.5944 - val_loss: 0.1136 - val_mae: 0.3012
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5575 - mae: 0.5758
8/8 [==============================] - 0s 3ms/step - loss: 0.5238 - mae: 0.5747

8/8 [==============================] - 0s 30ms/step - loss: 0.5238 - mae: 0.5747 - val_loss: 0.1078 - val_mae: 0.2926
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4620 - mae: 0.5291
8/8 [==============================] - 0s 3ms/step - loss: 0.5401 - mae: 0.5637

8/8 [==============================] - 0s 31ms/step - loss: 0.5401 - mae: 0.5637 - val_loss: 0.1035 - val_mae: 0.2860
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5695 - mae: 0.6200
8/8 [==============================] - 0s 3ms/step - loss: 0.5120 - mae: 0.5649

8/8 [==============================] - 0s 29ms/step - loss: 0.5120 - mae: 0.5649 - val_loss: 0.0991 - val_mae: 0.2801
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4158 - mae: 0.5082
8/8 [==============================] - 0s 2ms/step - loss: 0.4723 - mae: 0.5469

8/8 [==============================] - 0s 31ms/step - loss: 0.4723 - mae: 0.5469 - val_loss: 0.0925 - val_mae: 0.2697
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5793 - mae: 0.5959
8/8 [==============================] - 0s 3ms/step - loss: 0.4604 - mae: 0.5186

8/8 [==============================] - 0s 29ms/step - loss: 0.4604 - mae: 0.5186 - val_loss: 0.0894 - val_mae: 0.2651
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6084 - mae: 0.6303
8/8 [==============================] - 0s 3ms/step - loss: 0.4714 - mae: 0.5276

8/8 [==============================] - 0s 33ms/step - loss: 0.4714 - mae: 0.5276 - val_loss: 0.0875 - val_mae: 0.2623
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3354 - mae: 0.4689
8/8 [==============================] - 0s 3ms/step - loss: 0.5019 - mae: 0.5365

8/8 [==============================] - 0s 29ms/step - loss: 0.5019 - mae: 0.5365 - val_loss: 0.0862 - val_mae: 0.2606
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5103 - mae: 0.5393
8/8 [==============================] - 0s 3ms/step - loss: 0.4720 - mae: 0.5241

8/8 [==============================] - 0s 31ms/step - loss: 0.4720 - mae: 0.5241 - val_loss: 0.0847 - val_mae: 0.2588
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3834 - mae: 0.4668
8/8 [==============================] - 0s 3ms/step - loss: 0.4784 - mae: 0.5395

8/8 [==============================] - 0s 29ms/step - loss: 0.4784 - mae: 0.5395 - val_loss: 0.0832 - val_mae: 0.2566
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3068 - mae: 0.4507
8/8 [==============================] - 0s 3ms/step - loss: 0.4286 - mae: 0.4998

8/8 [==============================] - 0s 30ms/step - loss: 0.4286 - mae: 0.4998 - val_loss: 0.0809 - val_mae: 0.2529

Run completed: runs/2022-12-07T02-35-08Z

Training run 13/52 (flags = list(64, 32, 0.01, 30, 50, "sigmoid", "sigmoid", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-35-28Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 5s - loss: 1.2978 - mae: 1.0392
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0010s vs `on_train_batch_end` time: 0.0051s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.3872 - mae: 0.4813

13/13 [==============================] - 1s 64ms/step - loss: 0.3872 - mae: 0.4813 - val_loss: 0.0215 - val_mae: 0.1218
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1736 - mae: 0.3638
13/13 [==============================] - 0s 2ms/step - loss: 0.1728 - mae: 0.3315

13/13 [==============================] - 0s 20ms/step - loss: 0.1728 - mae: 0.3315 - val_loss: 0.0168 - val_mae: 0.0973
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1493 - mae: 0.3230
12/13 [==========================>...] - ETA: 0s - loss: 0.1607 - mae: 0.3223
13/13 [==============================] - 0s 5ms/step - loss: 0.1602 - mae: 0.3219

13/13 [==============================] - 0s 38ms/step - loss: 0.1602 - mae: 0.3219 - val_loss: 0.0214 - val_mae: 0.1117
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2000 - mae: 0.3638
13/13 [==============================] - 0s 3ms/step - loss: 0.1909 - mae: 0.3599

13/13 [==============================] - 0s 19ms/step - loss: 0.1909 - mae: 0.3599 - val_loss: 0.0188 - val_mae: 0.1138
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0995 - mae: 0.2517
13/13 [==============================] - 0s 3ms/step - loss: 0.1713 - mae: 0.3287

13/13 [==============================] - 0s 20ms/step - loss: 0.1713 - mae: 0.3287 - val_loss: 0.0153 - val_mae: 0.0980
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1393 - mae: 0.3283
13/13 [==============================] - 0s 4ms/step - loss: 0.1605 - mae: 0.3220

13/13 [==============================] - 0s 35ms/step - loss: 0.1605 - mae: 0.3220 - val_loss: 0.0202 - val_mae: 0.1197
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1448 - mae: 0.3126
13/13 [==============================] - 0s 4ms/step - loss: 0.1411 - mae: 0.3026

13/13 [==============================] - 0s 28ms/step - loss: 0.1411 - mae: 0.3026 - val_loss: 0.0401 - val_mae: 0.1803
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1569 - mae: 0.3052
13/13 [==============================] - 0s 3ms/step - loss: 0.1284 - mae: 0.2870

13/13 [==============================] - 0s 17ms/step - loss: 0.1284 - mae: 0.2870 - val_loss: 0.0159 - val_mae: 0.1035
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1584 - mae: 0.3597
13/13 [==============================] - 0s 3ms/step - loss: 0.1293 - mae: 0.2885

13/13 [==============================] - 0s 20ms/step - loss: 0.1293 - mae: 0.2885 - val_loss: 0.0154 - val_mae: 0.1010
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1302 - mae: 0.2944
13/13 [==============================] - 0s 2ms/step - loss: 0.1184 - mae: 0.2726

13/13 [==============================] - 0s 19ms/step - loss: 0.1184 - mae: 0.2726 - val_loss: 0.0138 - val_mae: 0.0880
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1131 - mae: 0.2843
13/13 [==============================] - 0s 3ms/step - loss: 0.1163 - mae: 0.2732

13/13 [==============================] - 0s 18ms/step - loss: 0.1163 - mae: 0.2732 - val_loss: 0.0187 - val_mae: 0.1148
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1046 - mae: 0.2586
13/13 [==============================] - 0s 3ms/step - loss: 0.1213 - mae: 0.2830

13/13 [==============================] - 0s 21ms/step - loss: 0.1213 - mae: 0.2830 - val_loss: 0.0134 - val_mae: 0.0850
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1497 - mae: 0.3302
13/13 [==============================] - 0s 3ms/step - loss: 0.1154 - mae: 0.2651

13/13 [==============================] - 0s 23ms/step - loss: 0.1154 - mae: 0.2651 - val_loss: 0.0137 - val_mae: 0.0942
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0725 - mae: 0.2280
13/13 [==============================] - 0s 3ms/step - loss: 0.0979 - mae: 0.2486

13/13 [==============================] - 0s 20ms/step - loss: 0.0979 - mae: 0.2486 - val_loss: 0.0219 - val_mae: 0.1279
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0723 - mae: 0.2102
13/13 [==============================] - 0s 3ms/step - loss: 0.0970 - mae: 0.2482

13/13 [==============================] - 0s 18ms/step - loss: 0.0970 - mae: 0.2482 - val_loss: 0.0148 - val_mae: 0.1003
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1029 - mae: 0.2600
13/13 [==============================] - 0s 3ms/step - loss: 0.1012 - mae: 0.2579

13/13 [==============================] - 0s 18ms/step - loss: 0.1012 - mae: 0.2579 - val_loss: 0.0167 - val_mae: 0.0959
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0788 - mae: 0.2150
13/13 [==============================] - 0s 2ms/step - loss: 0.0991 - mae: 0.2541

13/13 [==============================] - 0s 18ms/step - loss: 0.0991 - mae: 0.2541 - val_loss: 0.0154 - val_mae: 0.0911
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1720 - mae: 0.3407
13/13 [==============================] - 0s 3ms/step - loss: 0.1001 - mae: 0.2499

13/13 [==============================] - 0s 19ms/step - loss: 0.1001 - mae: 0.2499 - val_loss: 0.0136 - val_mae: 0.0851
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1092 - mae: 0.2539
13/13 [==============================] - 0s 3ms/step - loss: 0.0886 - mae: 0.2359

13/13 [==============================] - 0s 20ms/step - loss: 0.0886 - mae: 0.2359 - val_loss: 0.0249 - val_mae: 0.1395
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1557 - mae: 0.3128
13/13 [==============================] - 0s 3ms/step - loss: 0.0955 - mae: 0.2465

13/13 [==============================] - 0s 19ms/step - loss: 0.0955 - mae: 0.2465 - val_loss: 0.0154 - val_mae: 0.1040
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0578 - mae: 0.1830
13/13 [==============================] - 0s 3ms/step - loss: 0.0808 - mae: 0.2260

13/13 [==============================] - 0s 19ms/step - loss: 0.0808 - mae: 0.2260 - val_loss: 0.0124 - val_mae: 0.0882
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0304 - mae: 0.1360
13/13 [==============================] - 0s 3ms/step - loss: 0.0837 - mae: 0.2299

13/13 [==============================] - 0s 19ms/step - loss: 0.0837 - mae: 0.2299 - val_loss: 0.0136 - val_mae: 0.0956
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0992 - mae: 0.2629
13/13 [==============================] - 0s 3ms/step - loss: 0.0773 - mae: 0.2197

13/13 [==============================] - 0s 19ms/step - loss: 0.0773 - mae: 0.2197 - val_loss: 0.0136 - val_mae: 0.0962
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0634 - mae: 0.2007
13/13 [==============================] - 0s 2ms/step - loss: 0.0698 - mae: 0.2041

13/13 [==============================] - 0s 19ms/step - loss: 0.0698 - mae: 0.2041 - val_loss: 0.0117 - val_mae: 0.0787
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0792 - mae: 0.2011
13/13 [==============================] - 0s 2ms/step - loss: 0.0772 - mae: 0.2255

13/13 [==============================] - 0s 19ms/step - loss: 0.0772 - mae: 0.2255 - val_loss: 0.0120 - val_mae: 0.0869
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0841 - mae: 0.2443
13/13 [==============================] - 0s 3ms/step - loss: 0.0675 - mae: 0.2123

13/13 [==============================] - 0s 21ms/step - loss: 0.0675 - mae: 0.2123 - val_loss: 0.0119 - val_mae: 0.0791
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0410 - mae: 0.1574
13/13 [==============================] - 0s 3ms/step - loss: 0.0678 - mae: 0.2041

13/13 [==============================] - 0s 19ms/step - loss: 0.0678 - mae: 0.2041 - val_loss: 0.0144 - val_mae: 0.0876
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0489 - mae: 0.1838
13/13 [==============================] - 0s 3ms/step - loss: 0.0741 - mae: 0.2136

13/13 [==============================] - 0s 20ms/step - loss: 0.0741 - mae: 0.2136 - val_loss: 0.0138 - val_mae: 0.0849
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0575 - mae: 0.1862
13/13 [==============================] - 0s 3ms/step - loss: 0.0660 - mae: 0.2069

13/13 [==============================] - 0s 19ms/step - loss: 0.0660 - mae: 0.2069 - val_loss: 0.0119 - val_mae: 0.0867
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0633 - mae: 0.2115
13/13 [==============================] - 0s 3ms/step - loss: 0.0668 - mae: 0.2066

13/13 [==============================] - 0s 20ms/step - loss: 0.0668 - mae: 0.2066 - val_loss: 0.0152 - val_mae: 0.1032
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0582 - mae: 0.2012
13/13 [==============================] - 0s 3ms/step - loss: 0.0616 - mae: 0.1971

13/13 [==============================] - 0s 18ms/step - loss: 0.0616 - mae: 0.1971 - val_loss: 0.0129 - val_mae: 0.0929
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0472 - mae: 0.1699
13/13 [==============================] - 0s 3ms/step - loss: 0.0549 - mae: 0.1831

13/13 [==============================] - 0s 19ms/step - loss: 0.0549 - mae: 0.1831 - val_loss: 0.0139 - val_mae: 0.0976
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0618 - mae: 0.2155
13/13 [==============================] - 0s 3ms/step - loss: 0.0551 - mae: 0.1906

13/13 [==============================] - 0s 19ms/step - loss: 0.0551 - mae: 0.1906 - val_loss: 0.0112 - val_mae: 0.0818
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0433 - mae: 0.1681
13/13 [==============================] - 0s 3ms/step - loss: 0.0555 - mae: 0.1841

13/13 [==============================] - 0s 18ms/step - loss: 0.0555 - mae: 0.1841 - val_loss: 0.0114 - val_mae: 0.0773
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0460 - mae: 0.1753
13/13 [==============================] - 0s 2ms/step - loss: 0.0531 - mae: 0.1827

13/13 [==============================] - 0s 19ms/step - loss: 0.0531 - mae: 0.1827 - val_loss: 0.0110 - val_mae: 0.0768
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0584 - mae: 0.2072
13/13 [==============================] - 0s 3ms/step - loss: 0.0595 - mae: 0.1957

13/13 [==============================] - 0s 18ms/step - loss: 0.0595 - mae: 0.1957 - val_loss: 0.0115 - val_mae: 0.0849
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0738 - mae: 0.1913
13/13 [==============================] - 0s 3ms/step - loss: 0.0454 - mae: 0.1669

13/13 [==============================] - 0s 19ms/step - loss: 0.0454 - mae: 0.1669 - val_loss: 0.0109 - val_mae: 0.0764
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0551 - mae: 0.1688
13/13 [==============================] - 0s 2ms/step - loss: 0.0468 - mae: 0.1699

13/13 [==============================] - 0s 18ms/step - loss: 0.0468 - mae: 0.1699 - val_loss: 0.0108 - val_mae: 0.0777
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0446 - mae: 0.1606
13/13 [==============================] - 0s 3ms/step - loss: 0.0489 - mae: 0.1772

13/13 [==============================] - 0s 19ms/step - loss: 0.0489 - mae: 0.1772 - val_loss: 0.0108 - val_mae: 0.0785
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0926 - mae: 0.2464
13/13 [==============================] - 0s 2ms/step - loss: 0.0517 - mae: 0.1747

13/13 [==============================] - 0s 18ms/step - loss: 0.0517 - mae: 0.1747 - val_loss: 0.0109 - val_mae: 0.0798
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0592 - mae: 0.2037
13/13 [==============================] - 0s 2ms/step - loss: 0.0547 - mae: 0.1858

13/13 [==============================] - 0s 19ms/step - loss: 0.0547 - mae: 0.1858 - val_loss: 0.0107 - val_mae: 0.0778
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0663 - mae: 0.2148
13/13 [==============================] - 0s 3ms/step - loss: 0.0421 - mae: 0.1612

13/13 [==============================] - 0s 18ms/step - loss: 0.0421 - mae: 0.1612 - val_loss: 0.0107 - val_mae: 0.0767
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0385 - mae: 0.1569
13/13 [==============================] - 0s 3ms/step - loss: 0.0402 - mae: 0.1602

13/13 [==============================] - 0s 19ms/step - loss: 0.0402 - mae: 0.1602 - val_loss: 0.0107 - val_mae: 0.0754
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0598 - mae: 0.1929
13/13 [==============================] - 0s 2ms/step - loss: 0.0474 - mae: 0.1750

13/13 [==============================] - 0s 18ms/step - loss: 0.0474 - mae: 0.1750 - val_loss: 0.0106 - val_mae: 0.0768
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0276 - mae: 0.1362
13/13 [==============================] - 0s 3ms/step - loss: 0.0428 - mae: 0.1665

13/13 [==============================] - 0s 18ms/step - loss: 0.0428 - mae: 0.1665 - val_loss: 0.0106 - val_mae: 0.0786
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0451 - mae: 0.1713
13/13 [==============================] - 0s 3ms/step - loss: 0.0438 - mae: 0.1695

13/13 [==============================] - 0s 20ms/step - loss: 0.0438 - mae: 0.1695 - val_loss: 0.0105 - val_mae: 0.0768
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0301 - mae: 0.1400
13/13 [==============================] - 0s 3ms/step - loss: 0.0394 - mae: 0.1565

13/13 [==============================] - 0s 19ms/step - loss: 0.0394 - mae: 0.1565 - val_loss: 0.0106 - val_mae: 0.0748
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0335 - mae: 0.1464
13/13 [==============================] - 0s 3ms/step - loss: 0.0420 - mae: 0.1673

13/13 [==============================] - 0s 20ms/step - loss: 0.0420 - mae: 0.1673 - val_loss: 0.0118 - val_mae: 0.0783
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0656 - mae: 0.2138
13/13 [==============================] - 0s 2ms/step - loss: 0.0380 - mae: 0.1565

13/13 [==============================] - 0s 19ms/step - loss: 0.0380 - mae: 0.1565 - val_loss: 0.0109 - val_mae: 0.0808
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0389 - mae: 0.1622
13/13 [==============================] - 0s 3ms/step - loss: 0.0402 - mae: 0.1569

13/13 [==============================] - 0s 18ms/step - loss: 0.0402 - mae: 0.1569 - val_loss: 0.0106 - val_mae: 0.0787

Run completed: runs/2022-12-07T02-35-28Z

Training run 14/52 (flags = list(32, 10, 0.001, 50, 50, "relu", "tanh", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-35-53Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 3s - loss: 0.4561 - mae: 0.5275
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0042s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.7273 - mae: 0.6695

8/8 [==============================] - 1s 107ms/step - loss: 0.7273 - mae: 0.6695 - val_loss: 0.2184 - val_mae: 0.3864
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8145 - mae: 0.7342
8/8 [==============================] - 0s 3ms/step - loss: 0.6246 - mae: 0.6246

8/8 [==============================] - 0s 33ms/step - loss: 0.6246 - mae: 0.6246 - val_loss: 0.1992 - val_mae: 0.3673
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7447 - mae: 0.6862
8/8 [==============================] - 0s 4ms/step - loss: 0.6875 - mae: 0.6419

8/8 [==============================] - 0s 64ms/step - loss: 0.6875 - mae: 0.6419 - val_loss: 0.1822 - val_mae: 0.3497
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6927 - mae: 0.6428
8/8 [==============================] - 0s 3ms/step - loss: 0.6946 - mae: 0.6619

8/8 [==============================] - 0s 35ms/step - loss: 0.6946 - mae: 0.6619 - val_loss: 0.1652 - val_mae: 0.3306
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6564 - mae: 0.6471
8/8 [==============================] - 0s 2ms/step - loss: 0.6230 - mae: 0.6114

8/8 [==============================] - 0s 29ms/step - loss: 0.6230 - mae: 0.6114 - val_loss: 0.1506 - val_mae: 0.3137
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6879 - mae: 0.6481
8/8 [==============================] - 0s 5ms/step - loss: 0.5726 - mae: 0.6037

8/8 [==============================] - 0s 61ms/step - loss: 0.5726 - mae: 0.6037 - val_loss: 0.1367 - val_mae: 0.2967
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6690 - mae: 0.6467
8/8 [==============================] - 0s 4ms/step - loss: 0.6517 - mae: 0.6186

8/8 [==============================] - 0s 51ms/step - loss: 0.6517 - mae: 0.6186 - val_loss: 0.1225 - val_mae: 0.2775
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5115 - mae: 0.5807
8/8 [==============================] - 0s 3ms/step - loss: 0.5215 - mae: 0.5748

8/8 [==============================] - 0s 30ms/step - loss: 0.5215 - mae: 0.5748 - val_loss: 0.1108 - val_mae: 0.2631
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5334 - mae: 0.6103
8/8 [==============================] - 0s 3ms/step - loss: 0.4682 - mae: 0.5449

8/8 [==============================] - 0s 31ms/step - loss: 0.4682 - mae: 0.5449 - val_loss: 0.1026 - val_mae: 0.2530
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4707 - mae: 0.5510
8/8 [==============================] - 0s 3ms/step - loss: 0.4974 - mae: 0.5599

8/8 [==============================] - 0s 40ms/step - loss: 0.4974 - mae: 0.5599 - val_loss: 0.0965 - val_mae: 0.2452
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5404 - mae: 0.5675
8/8 [==============================] - 0s 3ms/step - loss: 0.5700 - mae: 0.5979

8/8 [==============================] - 0s 31ms/step - loss: 0.5700 - mae: 0.5979 - val_loss: 0.0881 - val_mae: 0.2336
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4095 - mae: 0.5378
8/8 [==============================] - 0s 3ms/step - loss: 0.4278 - mae: 0.5304

8/8 [==============================] - 0s 35ms/step - loss: 0.4278 - mae: 0.5304 - val_loss: 0.0833 - val_mae: 0.2271
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6080 - mae: 0.6153
8/8 [==============================] - 0s 2ms/step - loss: 0.4359 - mae: 0.5257

8/8 [==============================] - 0s 31ms/step - loss: 0.4359 - mae: 0.5257 - val_loss: 0.0775 - val_mae: 0.2187
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3962 - mae: 0.5135
8/8 [==============================] - 0s 2ms/step - loss: 0.4515 - mae: 0.5286

8/8 [==============================] - 0s 31ms/step - loss: 0.4515 - mae: 0.5286 - val_loss: 0.0737 - val_mae: 0.2133
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6120 - mae: 0.6479
8/8 [==============================] - 0s 3ms/step - loss: 0.4215 - mae: 0.5232

8/8 [==============================] - 0s 35ms/step - loss: 0.4215 - mae: 0.5232 - val_loss: 0.0701 - val_mae: 0.2081
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3281 - mae: 0.4726
8/8 [==============================] - 0s 3ms/step - loss: 0.3607 - mae: 0.4787

8/8 [==============================] - 0s 31ms/step - loss: 0.3607 - mae: 0.4787 - val_loss: 0.0671 - val_mae: 0.2034
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4551 - mae: 0.5319
8/8 [==============================] - 0s 3ms/step - loss: 0.4346 - mae: 0.5208

8/8 [==============================] - 1s 85ms/step - loss: 0.4346 - mae: 0.5208 - val_loss: 0.0643 - val_mae: 0.1991
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4331 - mae: 0.5430
8/8 [==============================] - 0s 3ms/step - loss: 0.4277 - mae: 0.5356

8/8 [==============================] - 0s 29ms/step - loss: 0.4277 - mae: 0.5356 - val_loss: 0.0621 - val_mae: 0.1955
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6163 - mae: 0.6208
8/8 [==============================] - 0s 3ms/step - loss: 0.4118 - mae: 0.5050

8/8 [==============================] - 0s 33ms/step - loss: 0.4118 - mae: 0.5050 - val_loss: 0.0594 - val_mae: 0.1914
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3133 - mae: 0.4661
8/8 [==============================] - 0s 3ms/step - loss: 0.4072 - mae: 0.5032

8/8 [==============================] - 0s 31ms/step - loss: 0.4072 - mae: 0.5032 - val_loss: 0.0572 - val_mae: 0.1874
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3658 - mae: 0.4994
8/8 [==============================] - 0s 3ms/step - loss: 0.3809 - mae: 0.4996

8/8 [==============================] - 0s 35ms/step - loss: 0.3809 - mae: 0.4996 - val_loss: 0.0550 - val_mae: 0.1831
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4184 - mae: 0.5324
8/8 [==============================] - 0s 3ms/step - loss: 0.3620 - mae: 0.4885

8/8 [==============================] - 0s 31ms/step - loss: 0.3620 - mae: 0.4885 - val_loss: 0.0535 - val_mae: 0.1805
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4041 - mae: 0.5190
8/8 [==============================] - 0s 3ms/step - loss: 0.4025 - mae: 0.5099

8/8 [==============================] - 0s 34ms/step - loss: 0.4025 - mae: 0.5099 - val_loss: 0.0520 - val_mae: 0.1791
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2750 - mae: 0.4085
8/8 [==============================] - 0s 3ms/step - loss: 0.3645 - mae: 0.4795

8/8 [==============================] - 0s 33ms/step - loss: 0.3645 - mae: 0.4795 - val_loss: 0.0506 - val_mae: 0.1770
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4677 - mae: 0.5158
8/8 [==============================] - 0s 2ms/step - loss: 0.4039 - mae: 0.5020

8/8 [==============================] - 0s 31ms/step - loss: 0.4039 - mae: 0.5020 - val_loss: 0.0487 - val_mae: 0.1748
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3972 - mae: 0.4965
8/8 [==============================] - 0s 3ms/step - loss: 0.3564 - mae: 0.4745

8/8 [==============================] - 0s 31ms/step - loss: 0.3564 - mae: 0.4745 - val_loss: 0.0479 - val_mae: 0.1734
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1802 - mae: 0.3687
8/8 [==============================] - 0s 3ms/step - loss: 0.3006 - mae: 0.4414

8/8 [==============================] - 0s 36ms/step - loss: 0.3006 - mae: 0.4414 - val_loss: 0.0467 - val_mae: 0.1710
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3518 - mae: 0.4486
8/8 [==============================] - 0s 2ms/step - loss: 0.3616 - mae: 0.4748

8/8 [==============================] - 0s 31ms/step - loss: 0.3616 - mae: 0.4748 - val_loss: 0.0455 - val_mae: 0.1693
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3467 - mae: 0.4570
8/8 [==============================] - 0s 3ms/step - loss: 0.3464 - mae: 0.4693

8/8 [==============================] - 0s 36ms/step - loss: 0.3464 - mae: 0.4693 - val_loss: 0.0446 - val_mae: 0.1679
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4894 - mae: 0.5879
8/8 [==============================] - 0s 2ms/step - loss: 0.4000 - mae: 0.5128

8/8 [==============================] - 0s 31ms/step - loss: 0.4000 - mae: 0.5128 - val_loss: 0.0435 - val_mae: 0.1659
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2883 - mae: 0.4676
8/8 [==============================] - 0s 3ms/step - loss: 0.3210 - mae: 0.4645

8/8 [==============================] - 0s 34ms/step - loss: 0.3210 - mae: 0.4645 - val_loss: 0.0426 - val_mae: 0.1639
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3145 - mae: 0.4610
8/8 [==============================] - 0s 3ms/step - loss: 0.3383 - mae: 0.4671

8/8 [==============================] - 0s 32ms/step - loss: 0.3383 - mae: 0.4671 - val_loss: 0.0418 - val_mae: 0.1623
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3942 - mae: 0.5181
8/8 [==============================] - 0s 3ms/step - loss: 0.3550 - mae: 0.4860

8/8 [==============================] - 0s 35ms/step - loss: 0.3550 - mae: 0.4860 - val_loss: 0.0409 - val_mae: 0.1605
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1981 - mae: 0.3645
8/8 [==============================] - 0s 3ms/step - loss: 0.3553 - mae: 0.4776

8/8 [==============================] - 0s 31ms/step - loss: 0.3553 - mae: 0.4776 - val_loss: 0.0402 - val_mae: 0.1593
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3586 - mae: 0.4691
8/8 [==============================] - 0s 3ms/step - loss: 0.3114 - mae: 0.4569

8/8 [==============================] - 0s 31ms/step - loss: 0.3114 - mae: 0.4569 - val_loss: 0.0397 - val_mae: 0.1583
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3006 - mae: 0.4409
8/8 [==============================] - 0s 2ms/step - loss: 0.2923 - mae: 0.4284

8/8 [==============================] - 0s 33ms/step - loss: 0.2923 - mae: 0.4284 - val_loss: 0.0393 - val_mae: 0.1576
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2873 - mae: 0.4419
8/8 [==============================] - 0s 3ms/step - loss: 0.3320 - mae: 0.4645

8/8 [==============================] - 0s 34ms/step - loss: 0.3320 - mae: 0.4645 - val_loss: 0.0389 - val_mae: 0.1569
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2555 - mae: 0.4118
8/8 [==============================] - 0s 2ms/step - loss: 0.3004 - mae: 0.4384

8/8 [==============================] - 0s 29ms/step - loss: 0.3004 - mae: 0.4384 - val_loss: 0.0383 - val_mae: 0.1556
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3531 - mae: 0.4672
8/8 [==============================] - 0s 3ms/step - loss: 0.3280 - mae: 0.4590

8/8 [==============================] - 0s 40ms/step - loss: 0.3280 - mae: 0.4590 - val_loss: 0.0373 - val_mae: 0.1531
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3237 - mae: 0.4492
8/8 [==============================] - 0s 2ms/step - loss: 0.3229 - mae: 0.4505

8/8 [==============================] - 0s 34ms/step - loss: 0.3229 - mae: 0.4505 - val_loss: 0.0369 - val_mae: 0.1521
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4158 - mae: 0.5480
8/8 [==============================] - 0s 3ms/step - loss: 0.2850 - mae: 0.4293

8/8 [==============================] - 0s 33ms/step - loss: 0.2850 - mae: 0.4293 - val_loss: 0.0360 - val_mae: 0.1500
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2663 - mae: 0.4237
8/8 [==============================] - 0s 3ms/step - loss: 0.3080 - mae: 0.4438

8/8 [==============================] - 0s 33ms/step - loss: 0.3080 - mae: 0.4438 - val_loss: 0.0355 - val_mae: 0.1488
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3100 - mae: 0.4448
8/8 [==============================] - 0s 3ms/step - loss: 0.2771 - mae: 0.4265

8/8 [==============================] - 0s 31ms/step - loss: 0.2771 - mae: 0.4265 - val_loss: 0.0348 - val_mae: 0.1473
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2712 - mae: 0.4277
8/8 [==============================] - 0s 3ms/step - loss: 0.2746 - mae: 0.4107

8/8 [==============================] - 0s 36ms/step - loss: 0.2746 - mae: 0.4107 - val_loss: 0.0341 - val_mae: 0.1457
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3465 - mae: 0.4809
8/8 [==============================] - 0s 3ms/step - loss: 0.3203 - mae: 0.4607

8/8 [==============================] - 0s 34ms/step - loss: 0.3203 - mae: 0.4607 - val_loss: 0.0337 - val_mae: 0.1447
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2561 - mae: 0.4327
8/8 [==============================] - 0s 2ms/step - loss: 0.2758 - mae: 0.4193

8/8 [==============================] - 0s 35ms/step - loss: 0.2758 - mae: 0.4193 - val_loss: 0.0336 - val_mae: 0.1446
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2092 - mae: 0.3785
8/8 [==============================] - 0s 2ms/step - loss: 0.2560 - mae: 0.4052

8/8 [==============================] - 0s 33ms/step - loss: 0.2560 - mae: 0.4052 - val_loss: 0.0331 - val_mae: 0.1434
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3526 - mae: 0.4825
8/8 [==============================] - 0s 3ms/step - loss: 0.2946 - mae: 0.4328

8/8 [==============================] - 0s 33ms/step - loss: 0.2946 - mae: 0.4328 - val_loss: 0.0326 - val_mae: 0.1422
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2448 - mae: 0.3963
8/8 [==============================] - 0s 3ms/step - loss: 0.2918 - mae: 0.4425

8/8 [==============================] - 0s 34ms/step - loss: 0.2918 - mae: 0.4425 - val_loss: 0.0322 - val_mae: 0.1412
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3580 - mae: 0.5044
8/8 [==============================] - 0s 3ms/step - loss: 0.2802 - mae: 0.4325

8/8 [==============================] - 0s 32ms/step - loss: 0.2802 - mae: 0.4325 - val_loss: 0.0318 - val_mae: 0.1403

Run completed: runs/2022-12-07T02-35-53Z

Training run 15/52 (flags = list(16, 32, 0.001, 50, 30, "sigmoid", "sigmoid", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-36-19Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 3s - loss: 1.1618 - mae: 0.8780
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0027s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 1.5771 - mae: 1.0299

8/8 [==============================] - 1s 119ms/step - loss: 1.5771 - mae: 1.0299 - val_loss: 0.8853 - val_mae: 0.9352
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 1.7194 - mae: 1.1096
8/8 [==============================] - 0s 3ms/step - loss: 1.5764 - mae: 1.0508

8/8 [==============================] - 0s 37ms/step - loss: 1.5764 - mae: 1.0508 - val_loss: 0.6223 - val_mae: 0.7822
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 1.4499 - mae: 0.9619
8/8 [==============================] - 0s 3ms/step - loss: 1.3428 - mae: 0.9523

8/8 [==============================] - 0s 42ms/step - loss: 1.3428 - mae: 0.9523 - val_loss: 0.4550 - val_mae: 0.6668
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 1.2213 - mae: 0.8757
8/8 [==============================] - 0s 2ms/step - loss: 1.1838 - mae: 0.8735

8/8 [==============================] - 0s 29ms/step - loss: 1.1838 - mae: 0.8735 - val_loss: 0.3434 - val_mae: 0.5771
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7300 - mae: 0.6918
8/8 [==============================] - 0s 3ms/step - loss: 1.0399 - mae: 0.8278

8/8 [==============================] - 0s 54ms/step - loss: 1.0399 - mae: 0.8278 - val_loss: 0.2550 - val_mae: 0.4946
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.8699 - mae: 0.7536
8/8 [==============================] - 0s 4ms/step - loss: 0.9649 - mae: 0.7812

8/8 [==============================] - 0s 59ms/step - loss: 0.9649 - mae: 0.7812 - val_loss: 0.1851 - val_mae: 0.4182
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.8890 - mae: 0.8027
8/8 [==============================] - 0s 3ms/step - loss: 0.9037 - mae: 0.7783

8/8 [==============================] - 0s 28ms/step - loss: 0.9037 - mae: 0.7783 - val_loss: 0.1328 - val_mae: 0.3501
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6817 - mae: 0.7230
8/8 [==============================] - 0s 3ms/step - loss: 0.8262 - mae: 0.7479

8/8 [==============================] - 0s 36ms/step - loss: 0.8262 - mae: 0.7479 - val_loss: 0.1066 - val_mae: 0.3104
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.8314 - mae: 0.7213
8/8 [==============================] - 0s 3ms/step - loss: 0.8361 - mae: 0.7343

8/8 [==============================] - 0s 31ms/step - loss: 0.8361 - mae: 0.7343 - val_loss: 0.0838 - val_mae: 0.2713
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7651 - mae: 0.7159
8/8 [==============================] - 0s 3ms/step - loss: 0.7142 - mae: 0.6853

8/8 [==============================] - 0s 43ms/step - loss: 0.7142 - mae: 0.6853 - val_loss: 0.0596 - val_mae: 0.2246
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6518 - mae: 0.6720
8/8 [==============================] - 0s 5ms/step - loss: 0.7131 - mae: 0.6988

8/8 [==============================] - 0s 64ms/step - loss: 0.7131 - mae: 0.6988 - val_loss: 0.0492 - val_mae: 0.2034
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7484 - mae: 0.7337
8/8 [==============================] - 0s 3ms/step - loss: 0.7022 - mae: 0.6887

8/8 [==============================] - 0s 35ms/step - loss: 0.7022 - mae: 0.6887 - val_loss: 0.0424 - val_mae: 0.1883
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6631 - mae: 0.6635
8/8 [==============================] - 0s 3ms/step - loss: 0.7743 - mae: 0.7156

8/8 [==============================] - 0s 31ms/step - loss: 0.7743 - mae: 0.7156 - val_loss: 0.0365 - val_mae: 0.1744
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7089 - mae: 0.6945
8/8 [==============================] - 0s 3ms/step - loss: 0.7378 - mae: 0.6857

8/8 [==============================] - 0s 31ms/step - loss: 0.7378 - mae: 0.6857 - val_loss: 0.0285 - val_mae: 0.1530
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5100 - mae: 0.5931
8/8 [==============================] - 0s 4ms/step - loss: 0.5869 - mae: 0.6218

8/8 [==============================] - 0s 50ms/step - loss: 0.5869 - mae: 0.6218 - val_loss: 0.0252 - val_mae: 0.1426
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6448 - mae: 0.6216
8/8 [==============================] - 0s 3ms/step - loss: 0.6277 - mae: 0.6267

8/8 [==============================] - 0s 48ms/step - loss: 0.6277 - mae: 0.6267 - val_loss: 0.0229 - val_mae: 0.1350
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7361 - mae: 0.7054
8/8 [==============================] - 0s 3ms/step - loss: 0.6907 - mae: 0.6777

8/8 [==============================] - 0s 35ms/step - loss: 0.6907 - mae: 0.6777 - val_loss: 0.0198 - val_mae: 0.1240
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6864 - mae: 0.6592
8/8 [==============================] - 0s 3ms/step - loss: 0.6291 - mae: 0.6237

8/8 [==============================] - 0s 34ms/step - loss: 0.6291 - mae: 0.6237 - val_loss: 0.0192 - val_mae: 0.1216
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.8436 - mae: 0.7255
8/8 [==============================] - 0s 4ms/step - loss: 0.6060 - mae: 0.6105

8/8 [==============================] - 0s 59ms/step - loss: 0.6060 - mae: 0.6105 - val_loss: 0.0171 - val_mae: 0.1132
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6921 - mae: 0.7053
8/8 [==============================] - 0s 4ms/step - loss: 0.5923 - mae: 0.6112

8/8 [==============================] - 0s 58ms/step - loss: 0.5923 - mae: 0.6112 - val_loss: 0.0162 - val_mae: 0.1094
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6584 - mae: 0.6777
8/8 [==============================] - 0s 4ms/step - loss: 0.6191 - mae: 0.6248

8/8 [==============================] - 0s 59ms/step - loss: 0.6191 - mae: 0.6248 - val_loss: 0.0143 - val_mae: 0.1005
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4755 - mae: 0.5666
8/8 [==============================] - 0s 4ms/step - loss: 0.5695 - mae: 0.6057

8/8 [==============================] - 0s 56ms/step - loss: 0.5695 - mae: 0.6057 - val_loss: 0.0147 - val_mae: 0.1022
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5643 - mae: 0.6071
8/8 [==============================] - 0s 3ms/step - loss: 0.5444 - mae: 0.5900

8/8 [==============================] - 0s 38ms/step - loss: 0.5444 - mae: 0.5900 - val_loss: 0.0137 - val_mae: 0.0977
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7769 - mae: 0.7684
8/8 [==============================] - 0s 3ms/step - loss: 0.6537 - mae: 0.6613

8/8 [==============================] - 0s 33ms/step - loss: 0.6537 - mae: 0.6613 - val_loss: 0.0119 - val_mae: 0.0894
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6223 - mae: 0.6537
8/8 [==============================] - 0s 3ms/step - loss: 0.5811 - mae: 0.6072

8/8 [==============================] - 0s 36ms/step - loss: 0.5811 - mae: 0.6072 - val_loss: 0.0125 - val_mae: 0.0919
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6039 - mae: 0.6627
8/8 [==============================] - 0s 3ms/step - loss: 0.5890 - mae: 0.6231

8/8 [==============================] - 0s 31ms/step - loss: 0.5890 - mae: 0.6231 - val_loss: 0.0131 - val_mae: 0.0946
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3894 - mae: 0.5297
8/8 [==============================] - 0s 3ms/step - loss: 0.5097 - mae: 0.5807

8/8 [==============================] - 0s 35ms/step - loss: 0.5097 - mae: 0.5807 - val_loss: 0.0121 - val_mae: 0.0899
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5731 - mae: 0.6030
8/8 [==============================] - 0s 4ms/step - loss: 0.5402 - mae: 0.5824

8/8 [==============================] - 0s 35ms/step - loss: 0.5402 - mae: 0.5824 - val_loss: 0.0114 - val_mae: 0.0864
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.5757 - mae: 0.6325
8/8 [==============================] - 0s 3ms/step - loss: 0.4602 - mae: 0.5550

8/8 [==============================] - 0s 33ms/step - loss: 0.4602 - mae: 0.5550 - val_loss: 0.0117 - val_mae: 0.0878
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.6351 - mae: 0.6706
8/8 [==============================] - 0s 3ms/step - loss: 0.4839 - mae: 0.5699

8/8 [==============================] - 0s 36ms/step - loss: 0.4839 - mae: 0.5699 - val_loss: 0.0122 - val_mae: 0.0903

Run completed: runs/2022-12-07T02-36-19Z

Training run 16/52 (flags = list(64, 50, 0.01, 50, 30, "relu", "tanh", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-36-44Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 4s - loss: 0.3578 - mae: 0.4986
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0018s vs `on_train_batch_end` time: 0.0035s). Check your callbacks.

8/8 [==============================] - 1s 3ms/step - loss: 0.4047 - mae: 0.5219

8/8 [==============================] - 1s 118ms/step - loss: 0.4047 - mae: 0.5219 - val_loss: 0.1079 - val_mae: 0.2643
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3058 - mae: 0.4484
8/8 [==============================] - 0s 4ms/step - loss: 0.3656 - mae: 0.4803

8/8 [==============================] - 0s 33ms/step - loss: 0.3656 - mae: 0.4803 - val_loss: 0.0614 - val_mae: 0.2046
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4392 - mae: 0.5449
8/8 [==============================] - 0s 3ms/step - loss: 0.3017 - mae: 0.4472

8/8 [==============================] - 0s 35ms/step - loss: 0.3017 - mae: 0.4472 - val_loss: 0.0508 - val_mae: 0.1797
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3173 - mae: 0.4478
8/8 [==============================] - 0s 3ms/step - loss: 0.2875 - mae: 0.4350

8/8 [==============================] - 0s 36ms/step - loss: 0.2875 - mae: 0.4350 - val_loss: 0.0452 - val_mae: 0.1674
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3319 - mae: 0.4477
8/8 [==============================] - 0s 4ms/step - loss: 0.2570 - mae: 0.3909

8/8 [==============================] - 0s 54ms/step - loss: 0.2570 - mae: 0.3909 - val_loss: 0.0363 - val_mae: 0.1496
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1884 - mae: 0.3426
8/8 [==============================] - 0s 4ms/step - loss: 0.2109 - mae: 0.3625

8/8 [==============================] - 0s 58ms/step - loss: 0.2109 - mae: 0.3625 - val_loss: 0.0315 - val_mae: 0.1441
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3039 - mae: 0.4271
8/8 [==============================] - 0s 3ms/step - loss: 0.2043 - mae: 0.3567

8/8 [==============================] - 0s 30ms/step - loss: 0.2043 - mae: 0.3567 - val_loss: 0.0248 - val_mae: 0.1248
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1712 - mae: 0.3306
8/8 [==============================] - 0s 3ms/step - loss: 0.2133 - mae: 0.3539

8/8 [==============================] - 0s 34ms/step - loss: 0.2133 - mae: 0.3539 - val_loss: 0.0242 - val_mae: 0.1204
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2312 - mae: 0.3781
8/8 [==============================] - 0s 5ms/step - loss: 0.1884 - mae: 0.3419

8/8 [==============================] - 0s 45ms/step - loss: 0.1884 - mae: 0.3419 - val_loss: 0.0235 - val_mae: 0.1221
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2975 - mae: 0.3692
8/8 [==============================] - 0s 5ms/step - loss: 0.1726 - mae: 0.3115

8/8 [==============================] - 0s 53ms/step - loss: 0.1726 - mae: 0.3115 - val_loss: 0.0201 - val_mae: 0.1129
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1711 - mae: 0.3107
8/8 [==============================] - 0s 4ms/step - loss: 0.1772 - mae: 0.3221

8/8 [==============================] - 0s 55ms/step - loss: 0.1772 - mae: 0.3221 - val_loss: 0.0182 - val_mae: 0.1135
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1351 - mae: 0.2763
8/8 [==============================] - 0s 4ms/step - loss: 0.1278 - mae: 0.2765

8/8 [==============================] - 0s 34ms/step - loss: 0.1278 - mae: 0.2765 - val_loss: 0.0171 - val_mae: 0.1008
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1925 - mae: 0.3554
8/8 [==============================] - 0s 3ms/step - loss: 0.1424 - mae: 0.2977

8/8 [==============================] - 0s 46ms/step - loss: 0.1424 - mae: 0.2977 - val_loss: 0.0153 - val_mae: 0.0992
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1474 - mae: 0.3038
8/8 [==============================] - 0s 4ms/step - loss: 0.1347 - mae: 0.2837

8/8 [==============================] - 0s 39ms/step - loss: 0.1347 - mae: 0.2837 - val_loss: 0.0162 - val_mae: 0.1064
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1634 - mae: 0.3132
8/8 [==============================] - 0s 3ms/step - loss: 0.1355 - mae: 0.2885

8/8 [==============================] - 0s 31ms/step - loss: 0.1355 - mae: 0.2885 - val_loss: 0.0143 - val_mae: 0.0951
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1296 - mae: 0.2941
8/8 [==============================] - 0s 3ms/step - loss: 0.1275 - mae: 0.2848

8/8 [==============================] - 0s 33ms/step - loss: 0.1275 - mae: 0.2848 - val_loss: 0.0134 - val_mae: 0.0906
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1058 - mae: 0.2544
8/8 [==============================] - 0s 3ms/step - loss: 0.1026 - mae: 0.2565

8/8 [==============================] - 0s 31ms/step - loss: 0.1026 - mae: 0.2565 - val_loss: 0.0124 - val_mae: 0.0910
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1171 - mae: 0.2669
8/8 [==============================] - 0s 3ms/step - loss: 0.1087 - mae: 0.2589

8/8 [==============================] - 0s 30ms/step - loss: 0.1087 - mae: 0.2589 - val_loss: 0.0128 - val_mae: 0.0970
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1038 - mae: 0.2433
8/8 [==============================] - 0s 3ms/step - loss: 0.0981 - mae: 0.2477

8/8 [==============================] - 0s 33ms/step - loss: 0.0981 - mae: 0.2477 - val_loss: 0.0121 - val_mae: 0.0888
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0843 - mae: 0.2242
8/8 [==============================] - 0s 3ms/step - loss: 0.1012 - mae: 0.2499

8/8 [==============================] - 0s 33ms/step - loss: 0.1012 - mae: 0.2499 - val_loss: 0.0110 - val_mae: 0.0788
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0853 - mae: 0.2399
8/8 [==============================] - 0s 3ms/step - loss: 0.1082 - mae: 0.2541

8/8 [==============================] - 0s 31ms/step - loss: 0.1082 - mae: 0.2541 - val_loss: 0.0101 - val_mae: 0.0775
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0788 - mae: 0.2285
8/8 [==============================] - 0s 3ms/step - loss: 0.0933 - mae: 0.2328

8/8 [==============================] - 0s 32ms/step - loss: 0.0933 - mae: 0.2328 - val_loss: 0.0099 - val_mae: 0.0766
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1204 - mae: 0.2586
8/8 [==============================] - 0s 3ms/step - loss: 0.0816 - mae: 0.2268

8/8 [==============================] - 0s 32ms/step - loss: 0.0816 - mae: 0.2268 - val_loss: 0.0094 - val_mae: 0.0749
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1112 - mae: 0.2703
8/8 [==============================] - 0s 3ms/step - loss: 0.0947 - mae: 0.2485

8/8 [==============================] - 0s 31ms/step - loss: 0.0947 - mae: 0.2485 - val_loss: 0.0093 - val_mae: 0.0767
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0978 - mae: 0.2478
8/8 [==============================] - 0s 3ms/step - loss: 0.0851 - mae: 0.2342

8/8 [==============================] - 0s 31ms/step - loss: 0.0851 - mae: 0.2342 - val_loss: 0.0094 - val_mae: 0.0775
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0796 - mae: 0.2080
8/8 [==============================] - 0s 3ms/step - loss: 0.0853 - mae: 0.2323

8/8 [==============================] - 0s 31ms/step - loss: 0.0853 - mae: 0.2323 - val_loss: 0.0086 - val_mae: 0.0716
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0628 - mae: 0.2034
8/8 [==============================] - 0s 3ms/step - loss: 0.0651 - mae: 0.2073

8/8 [==============================] - 0s 31ms/step - loss: 0.0651 - mae: 0.2073 - val_loss: 0.0086 - val_mae: 0.0722
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0777 - mae: 0.2187
8/8 [==============================] - 0s 3ms/step - loss: 0.0735 - mae: 0.2111

8/8 [==============================] - 0s 31ms/step - loss: 0.0735 - mae: 0.2111 - val_loss: 0.0088 - val_mae: 0.0735
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0628 - mae: 0.2008
8/8 [==============================] - 0s 3ms/step - loss: 0.0774 - mae: 0.2165

8/8 [==============================] - 0s 31ms/step - loss: 0.0774 - mae: 0.2165 - val_loss: 0.0087 - val_mae: 0.0754
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0726 - mae: 0.2075
8/8 [==============================] - 0s 3ms/step - loss: 0.0717 - mae: 0.2126

8/8 [==============================] - 0s 34ms/step - loss: 0.0717 - mae: 0.2126 - val_loss: 0.0085 - val_mae: 0.0752

Run completed: runs/2022-12-07T02-36-44Z

Training run 17/52 (flags = list(16, 50, 0.01, 30, 50, "relu", "relu", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-37-09Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 6s - loss: 0.1636 - mae: 0.3553
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0012s vs `on_train_batch_end` time: 0.0043s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 0.1765 - mae: 0.3478

13/13 [==============================] - 1s 73ms/step - loss: 0.1765 - mae: 0.3478 - val_loss: 0.0597 - val_mae: 0.2180
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1385 - mae: 0.3024
13/13 [==============================] - 0s 4ms/step - loss: 0.1246 - mae: 0.2822

13/13 [==============================] - 0s 24ms/step - loss: 0.1246 - mae: 0.2822 - val_loss: 0.0443 - val_mae: 0.1796
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1058 - mae: 0.2427
13/13 [==============================] - 0s 4ms/step - loss: 0.1088 - mae: 0.2616

13/13 [==============================] - 0s 31ms/step - loss: 0.1088 - mae: 0.2616 - val_loss: 0.0394 - val_mae: 0.1715
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0566 - mae: 0.2015
13/13 [==============================] - 0s 3ms/step - loss: 0.1063 - mae: 0.2591

13/13 [==============================] - 0s 19ms/step - loss: 0.1063 - mae: 0.2591 - val_loss: 0.0365 - val_mae: 0.1630
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1472 - mae: 0.3045
11/13 [========================>.....] - ETA: 0s - loss: 0.0998 - mae: 0.2505
13/13 [==============================] - 0s 5ms/step - loss: 0.0996 - mae: 0.2491

13/13 [==============================] - 0s 33ms/step - loss: 0.0996 - mae: 0.2491 - val_loss: 0.0316 - val_mae: 0.1540
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1044 - mae: 0.2434
12/13 [==========================>...] - ETA: 0s - loss: 0.0898 - mae: 0.2373
13/13 [==============================] - 0s 5ms/step - loss: 0.0896 - mae: 0.2372

13/13 [==============================] - 0s 34ms/step - loss: 0.0896 - mae: 0.2372 - val_loss: 0.0331 - val_mae: 0.1616
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0648 - mae: 0.2128
13/13 [==============================] - 0s 4ms/step - loss: 0.0775 - mae: 0.2264

13/13 [==============================] - 0s 24ms/step - loss: 0.0775 - mae: 0.2264 - val_loss: 0.0270 - val_mae: 0.1415
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0738 - mae: 0.2101
13/13 [==============================] - 0s 3ms/step - loss: 0.0841 - mae: 0.2310

13/13 [==============================] - 0s 21ms/step - loss: 0.0841 - mae: 0.2310 - val_loss: 0.0215 - val_mae: 0.1233
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0926 - mae: 0.2399
13/13 [==============================] - 0s 3ms/step - loss: 0.0803 - mae: 0.2199

13/13 [==============================] - 0s 22ms/step - loss: 0.0803 - mae: 0.2199 - val_loss: 0.0218 - val_mae: 0.1271
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0969 - mae: 0.2269
13/13 [==============================] - 0s 3ms/step - loss: 0.0838 - mae: 0.2143

13/13 [==============================] - 0s 23ms/step - loss: 0.0838 - mae: 0.2143 - val_loss: 0.0381 - val_mae: 0.1731
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1084 - mae: 0.2689
13/13 [==============================] - 0s 4ms/step - loss: 0.0735 - mae: 0.2173

13/13 [==============================] - 0s 28ms/step - loss: 0.0735 - mae: 0.2173 - val_loss: 0.0285 - val_mae: 0.1477
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0812 - mae: 0.2106
11/13 [========================>.....] - ETA: 0s - loss: 0.0568 - mae: 0.1871
13/13 [==============================] - 0s 5ms/step - loss: 0.0576 - mae: 0.1886

13/13 [==============================] - 0s 35ms/step - loss: 0.0576 - mae: 0.1886 - val_loss: 0.0202 - val_mae: 0.1228
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0610 - mae: 0.1978
13/13 [==============================] - 0s 3ms/step - loss: 0.0493 - mae: 0.1740

13/13 [==============================] - 0s 26ms/step - loss: 0.0493 - mae: 0.1740 - val_loss: 0.0197 - val_mae: 0.1214
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0825 - mae: 0.2209
13/13 [==============================] - 0s 3ms/step - loss: 0.0618 - mae: 0.1929

13/13 [==============================] - 1s 52ms/step - loss: 0.0618 - mae: 0.1929 - val_loss: 0.0197 - val_mae: 0.1209
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0768 - mae: 0.2289
13/13 [==============================] - 0s 2ms/step - loss: 0.0476 - mae: 0.1718

13/13 [==============================] - 0s 18ms/step - loss: 0.0476 - mae: 0.1718 - val_loss: 0.0208 - val_mae: 0.1239
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0511 - mae: 0.1884
13/13 [==============================] - 0s 3ms/step - loss: 0.0636 - mae: 0.1986

13/13 [==============================] - 0s 23ms/step - loss: 0.0636 - mae: 0.1986 - val_loss: 0.0164 - val_mae: 0.1098
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0402 - mae: 0.1633
13/13 [==============================] - 0s 3ms/step - loss: 0.0534 - mae: 0.1707

13/13 [==============================] - 0s 29ms/step - loss: 0.0534 - mae: 0.1707 - val_loss: 0.0167 - val_mae: 0.1117
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0617 - mae: 0.2098
13/13 [==============================] - 0s 3ms/step - loss: 0.0504 - mae: 0.1759

13/13 [==============================] - 0s 22ms/step - loss: 0.0504 - mae: 0.1759 - val_loss: 0.0159 - val_mae: 0.1088
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0516 - mae: 0.1811
13/13 [==============================] - 0s 4ms/step - loss: 0.0496 - mae: 0.1694

13/13 [==============================] - 0s 27ms/step - loss: 0.0496 - mae: 0.1694 - val_loss: 0.0158 - val_mae: 0.1087
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0450 - mae: 0.1603
13/13 [==============================] - 0s 3ms/step - loss: 0.0515 - mae: 0.1771

13/13 [==============================] - 0s 21ms/step - loss: 0.0515 - mae: 0.1771 - val_loss: 0.0173 - val_mae: 0.1113
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0786 - mae: 0.2001
13/13 [==============================] - 0s 3ms/step - loss: 0.0492 - mae: 0.1721

13/13 [==============================] - 0s 23ms/step - loss: 0.0492 - mae: 0.1721 - val_loss: 0.0195 - val_mae: 0.1190
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0374 - mae: 0.1526
13/13 [==============================] - 0s 3ms/step - loss: 0.0497 - mae: 0.1727

13/13 [==============================] - 0s 21ms/step - loss: 0.0497 - mae: 0.1727 - val_loss: 0.0168 - val_mae: 0.1097
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0249 - mae: 0.1223
13/13 [==============================] - 0s 3ms/step - loss: 0.0398 - mae: 0.1528

13/13 [==============================] - 0s 25ms/step - loss: 0.0398 - mae: 0.1528 - val_loss: 0.0163 - val_mae: 0.1082
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0522 - mae: 0.1710
13/13 [==============================] - 0s 3ms/step - loss: 0.0419 - mae: 0.1617

13/13 [==============================] - 0s 22ms/step - loss: 0.0419 - mae: 0.1617 - val_loss: 0.0153 - val_mae: 0.1060
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1109 - mae: 0.2051
13/13 [==============================] - 0s 4ms/step - loss: 0.0403 - mae: 0.1520

13/13 [==============================] - 0s 25ms/step - loss: 0.0403 - mae: 0.1520 - val_loss: 0.0153 - val_mae: 0.1051
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0323 - mae: 0.1515
13/13 [==============================] - 0s 3ms/step - loss: 0.0373 - mae: 0.1546

13/13 [==============================] - 0s 23ms/step - loss: 0.0373 - mae: 0.1546 - val_loss: 0.0147 - val_mae: 0.1033
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0488 - mae: 0.1744
13/13 [==============================] - 0s 3ms/step - loss: 0.0413 - mae: 0.1542

13/13 [==============================] - 0s 22ms/step - loss: 0.0413 - mae: 0.1542 - val_loss: 0.0150 - val_mae: 0.1038
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0421 - mae: 0.1685
13/13 [==============================] - 0s 3ms/step - loss: 0.0368 - mae: 0.1512

13/13 [==============================] - 0s 22ms/step - loss: 0.0368 - mae: 0.1512 - val_loss: 0.0135 - val_mae: 0.0990
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0205 - mae: 0.1104
13/13 [==============================] - 0s 3ms/step - loss: 0.0335 - mae: 0.1438

13/13 [==============================] - 0s 22ms/step - loss: 0.0335 - mae: 0.1438 - val_loss: 0.0171 - val_mae: 0.1127
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0676 - mae: 0.2063
13/13 [==============================] - 0s 3ms/step - loss: 0.0376 - mae: 0.1540

13/13 [==============================] - 0s 24ms/step - loss: 0.0376 - mae: 0.1540 - val_loss: 0.0154 - val_mae: 0.1064
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0502 - mae: 0.1754
13/13 [==============================] - 0s 3ms/step - loss: 0.0380 - mae: 0.1543

13/13 [==============================] - 0s 23ms/step - loss: 0.0380 - mae: 0.1543 - val_loss: 0.0152 - val_mae: 0.1060
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0486 - mae: 0.1633
13/13 [==============================] - 0s 3ms/step - loss: 0.0341 - mae: 0.1438

13/13 [==============================] - 0s 21ms/step - loss: 0.0341 - mae: 0.1438 - val_loss: 0.0149 - val_mae: 0.1047
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0304 - mae: 0.1439
13/13 [==============================] - 0s 3ms/step - loss: 0.0308 - mae: 0.1411

13/13 [==============================] - 0s 23ms/step - loss: 0.0308 - mae: 0.1411 - val_loss: 0.0173 - val_mae: 0.1146
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0279 - mae: 0.1367
13/13 [==============================] - 0s 3ms/step - loss: 0.0335 - mae: 0.1441

13/13 [==============================] - 0s 23ms/step - loss: 0.0335 - mae: 0.1441 - val_loss: 0.0164 - val_mae: 0.1112
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0146 - mae: 0.1020
13/13 [==============================] - 0s 3ms/step - loss: 0.0335 - mae: 0.1411

13/13 [==============================] - 0s 22ms/step - loss: 0.0335 - mae: 0.1411 - val_loss: 0.0169 - val_mae: 0.1132
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0343 - mae: 0.1446
13/13 [==============================] - 0s 4ms/step - loss: 0.0317 - mae: 0.1403

13/13 [==============================] - 0s 24ms/step - loss: 0.0317 - mae: 0.1403 - val_loss: 0.0151 - val_mae: 0.1061
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0225 - mae: 0.1296
13/13 [==============================] - 0s 4ms/step - loss: 0.0338 - mae: 0.1468

13/13 [==============================] - 0s 26ms/step - loss: 0.0338 - mae: 0.1468 - val_loss: 0.0145 - val_mae: 0.1032
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0354 - mae: 0.1539
13/13 [==============================] - 0s 3ms/step - loss: 0.0360 - mae: 0.1496

13/13 [==============================] - 0s 22ms/step - loss: 0.0360 - mae: 0.1496 - val_loss: 0.0137 - val_mae: 0.0998
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0197 - mae: 0.1155
13/13 [==============================] - 0s 3ms/step - loss: 0.0260 - mae: 0.1311

13/13 [==============================] - 0s 25ms/step - loss: 0.0260 - mae: 0.1311 - val_loss: 0.0134 - val_mae: 0.0985
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0386 - mae: 0.1565
13/13 [==============================] - 0s 4ms/step - loss: 0.0300 - mae: 0.1328

13/13 [==============================] - 0s 20ms/step - loss: 0.0300 - mae: 0.1328 - val_loss: 0.0152 - val_mae: 0.1067
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0269 - mae: 0.1265
13/13 [==============================] - 0s 3ms/step - loss: 0.0330 - mae: 0.1377

13/13 [==============================] - 0s 21ms/step - loss: 0.0330 - mae: 0.1377 - val_loss: 0.0147 - val_mae: 0.1049
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0223 - mae: 0.1138
13/13 [==============================] - 0s 3ms/step - loss: 0.0266 - mae: 0.1283

13/13 [==============================] - 0s 21ms/step - loss: 0.0266 - mae: 0.1283 - val_loss: 0.0143 - val_mae: 0.1033
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0356 - mae: 0.1370
13/13 [==============================] - 0s 3ms/step - loss: 0.0264 - mae: 0.1271

13/13 [==============================] - 0s 22ms/step - loss: 0.0264 - mae: 0.1271 - val_loss: 0.0136 - val_mae: 0.1001
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0147 - mae: 0.0938
13/13 [==============================] - 0s 3ms/step - loss: 0.0266 - mae: 0.1276

13/13 [==============================] - 0s 23ms/step - loss: 0.0266 - mae: 0.1276 - val_loss: 0.0128 - val_mae: 0.0964
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0233 - mae: 0.1208
13/13 [==============================] - 0s 3ms/step - loss: 0.0255 - mae: 0.1221

13/13 [==============================] - 0s 21ms/step - loss: 0.0255 - mae: 0.1221 - val_loss: 0.0125 - val_mae: 0.0945
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0207 - mae: 0.1157
13/13 [==============================] - 0s 3ms/step - loss: 0.0253 - mae: 0.1267

13/13 [==============================] - 0s 21ms/step - loss: 0.0253 - mae: 0.1267 - val_loss: 0.0123 - val_mae: 0.0932
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0381 - mae: 0.1559
13/13 [==============================] - 0s 3ms/step - loss: 0.0277 - mae: 0.1341

13/13 [==============================] - 0s 21ms/step - loss: 0.0277 - mae: 0.1341 - val_loss: 0.0123 - val_mae: 0.0937
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0178 - mae: 0.1086
13/13 [==============================] - 0s 3ms/step - loss: 0.0275 - mae: 0.1322

13/13 [==============================] - 0s 22ms/step - loss: 0.0275 - mae: 0.1322 - val_loss: 0.0129 - val_mae: 0.0970
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0281 - mae: 0.1352
13/13 [==============================] - 0s 3ms/step - loss: 0.0278 - mae: 0.1317

13/13 [==============================] - 0s 21ms/step - loss: 0.0278 - mae: 0.1317 - val_loss: 0.0127 - val_mae: 0.0963
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0206 - mae: 0.1133
13/13 [==============================] - 0s 3ms/step - loss: 0.0227 - mae: 0.1167

13/13 [==============================] - 0s 20ms/step - loss: 0.0227 - mae: 0.1167 - val_loss: 0.0115 - val_mae: 0.0895

Run completed: runs/2022-12-07T02-37-09Z

Training run 18/52 (flags = list(16, 32, 0.001, 30, 50, "relu", "tanh", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-37-40Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 8s - loss: 0.6691 - mae: 0.7385
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0016s vs `on_train_batch_end` time: 0.0045s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 0.7823 - mae: 0.7758

13/13 [==============================] - 2s 69ms/step - loss: 0.7823 - mae: 0.7758 - val_loss: 0.5689 - val_mae: 0.7364
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7767 - mae: 0.7744
13/13 [==============================] - 0s 3ms/step - loss: 0.6976 - mae: 0.7310

13/13 [==============================] - 0s 21ms/step - loss: 0.6976 - mae: 0.7310 - val_loss: 0.4207 - val_mae: 0.6296
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7936 - mae: 0.7769
13/13 [==============================] - 0s 3ms/step - loss: 0.6509 - mae: 0.6764

13/13 [==============================] - 0s 20ms/step - loss: 0.6509 - mae: 0.6764 - val_loss: 0.3086 - val_mae: 0.5363
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6618 - mae: 0.6978
13/13 [==============================] - 0s 3ms/step - loss: 0.5137 - mae: 0.5908

13/13 [==============================] - 0s 23ms/step - loss: 0.5137 - mae: 0.5908 - val_loss: 0.2262 - val_mae: 0.4558
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5846 - mae: 0.6302
13/13 [==============================] - 0s 4ms/step - loss: 0.3705 - mae: 0.5063

13/13 [==============================] - 0s 37ms/step - loss: 0.3705 - mae: 0.5063 - val_loss: 0.1786 - val_mae: 0.4013
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4597 - mae: 0.5643
13/13 [==============================] - 0s 3ms/step - loss: 0.4515 - mae: 0.5483

13/13 [==============================] - 0s 31ms/step - loss: 0.4515 - mae: 0.5483 - val_loss: 0.1265 - val_mae: 0.3322
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4601 - mae: 0.5863
13/13 [==============================] - 0s 3ms/step - loss: 0.3277 - mae: 0.4793

13/13 [==============================] - 0s 22ms/step - loss: 0.3277 - mae: 0.4793 - val_loss: 0.0965 - val_mae: 0.2855
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1904 - mae: 0.3533
13/13 [==============================] - 0s 3ms/step - loss: 0.3393 - mae: 0.4738

13/13 [==============================] - 0s 21ms/step - loss: 0.3393 - mae: 0.4738 - val_loss: 0.0762 - val_mae: 0.2494
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3965 - mae: 0.5184
13/13 [==============================] - 0s 4ms/step - loss: 0.2976 - mae: 0.4423

13/13 [==============================] - 0s 30ms/step - loss: 0.2976 - mae: 0.4423 - val_loss: 0.0561 - val_mae: 0.2072
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1980 - mae: 0.3295
13/13 [==============================] - 0s 4ms/step - loss: 0.2783 - mae: 0.4204

13/13 [==============================] - 0s 37ms/step - loss: 0.2783 - mae: 0.4204 - val_loss: 0.0432 - val_mae: 0.1764
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2389 - mae: 0.4116
13/13 [==============================] - 0s 3ms/step - loss: 0.2732 - mae: 0.4292

13/13 [==============================] - 0s 20ms/step - loss: 0.2732 - mae: 0.4292 - val_loss: 0.0338 - val_mae: 0.1509
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4158 - mae: 0.4897
13/13 [==============================] - 0s 3ms/step - loss: 0.2843 - mae: 0.4216

13/13 [==============================] - 0s 23ms/step - loss: 0.2843 - mae: 0.4216 - val_loss: 0.0276 - val_mae: 0.1323
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2187 - mae: 0.3980
13/13 [==============================] - 0s 3ms/step - loss: 0.2034 - mae: 0.3666

13/13 [==============================] - 0s 23ms/step - loss: 0.2034 - mae: 0.3666 - val_loss: 0.0239 - val_mae: 0.1204
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2025 - mae: 0.3506
13/13 [==============================] - 0s 3ms/step - loss: 0.2247 - mae: 0.3813

13/13 [==============================] - 0s 22ms/step - loss: 0.2247 - mae: 0.3813 - val_loss: 0.0209 - val_mae: 0.1099
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2883 - mae: 0.4505
13/13 [==============================] - 0s 3ms/step - loss: 0.2751 - mae: 0.4173

13/13 [==============================] - 0s 20ms/step - loss: 0.2751 - mae: 0.4173 - val_loss: 0.0185 - val_mae: 0.1011
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2523 - mae: 0.3740
13/13 [==============================] - 0s 3ms/step - loss: 0.2260 - mae: 0.3733

13/13 [==============================] - 0s 22ms/step - loss: 0.2260 - mae: 0.3733 - val_loss: 0.0171 - val_mae: 0.0979
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2565 - mae: 0.3855
13/13 [==============================] - 0s 3ms/step - loss: 0.2164 - mae: 0.3677

13/13 [==============================] - 0s 21ms/step - loss: 0.2164 - mae: 0.3677 - val_loss: 0.0159 - val_mae: 0.0954
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2755 - mae: 0.3954
13/13 [==============================] - 0s 3ms/step - loss: 0.2302 - mae: 0.3719

13/13 [==============================] - 0s 20ms/step - loss: 0.2302 - mae: 0.3719 - val_loss: 0.0148 - val_mae: 0.0918
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3193 - mae: 0.4460
13/13 [==============================] - 0s 3ms/step - loss: 0.2143 - mae: 0.3719

13/13 [==============================] - 0s 21ms/step - loss: 0.2143 - mae: 0.3719 - val_loss: 0.0139 - val_mae: 0.0890
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2449 - mae: 0.3907
13/13 [==============================] - 0s 3ms/step - loss: 0.2032 - mae: 0.3538

13/13 [==============================] - 0s 21ms/step - loss: 0.2032 - mae: 0.3538 - val_loss: 0.0133 - val_mae: 0.0877
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1948 - mae: 0.3513
13/13 [==============================] - 0s 3ms/step - loss: 0.2200 - mae: 0.3719

13/13 [==============================] - 0s 19ms/step - loss: 0.2200 - mae: 0.3719 - val_loss: 0.0134 - val_mae: 0.0900
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1539 - mae: 0.3228
13/13 [==============================] - 0s 3ms/step - loss: 0.1998 - mae: 0.3388

13/13 [==============================] - 0s 23ms/step - loss: 0.1998 - mae: 0.3388 - val_loss: 0.0132 - val_mae: 0.0895
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2353 - mae: 0.3680
13/13 [==============================] - 0s 3ms/step - loss: 0.1752 - mae: 0.3182

13/13 [==============================] - 0s 21ms/step - loss: 0.1752 - mae: 0.3182 - val_loss: 0.0134 - val_mae: 0.0907
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1883 - mae: 0.3602
13/13 [==============================] - 0s 3ms/step - loss: 0.2047 - mae: 0.3428

13/13 [==============================] - 0s 23ms/step - loss: 0.2047 - mae: 0.3428 - val_loss: 0.0133 - val_mae: 0.0890
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2076 - mae: 0.3486
13/13 [==============================] - 0s 3ms/step - loss: 0.1903 - mae: 0.3359

13/13 [==============================] - 0s 20ms/step - loss: 0.1903 - mae: 0.3359 - val_loss: 0.0133 - val_mae: 0.0886
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0937 - mae: 0.2455
13/13 [==============================] - 0s 3ms/step - loss: 0.1646 - mae: 0.3147

13/13 [==============================] - 0s 19ms/step - loss: 0.1646 - mae: 0.3147 - val_loss: 0.0127 - val_mae: 0.0863
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1270 - mae: 0.3033
13/13 [==============================] - 0s 2ms/step - loss: 0.1703 - mae: 0.3236

13/13 [==============================] - 0s 20ms/step - loss: 0.1703 - mae: 0.3236 - val_loss: 0.0132 - val_mae: 0.0873
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2101 - mae: 0.3987
13/13 [==============================] - 0s 3ms/step - loss: 0.1823 - mae: 0.3303

13/13 [==============================] - 0s 24ms/step - loss: 0.1823 - mae: 0.3303 - val_loss: 0.0121 - val_mae: 0.0837
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1620 - mae: 0.3309
13/13 [==============================] - 0s 3ms/step - loss: 0.1490 - mae: 0.3028

13/13 [==============================] - 0s 22ms/step - loss: 0.1490 - mae: 0.3028 - val_loss: 0.0117 - val_mae: 0.0803
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1670 - mae: 0.3557
13/13 [==============================] - 0s 3ms/step - loss: 0.1729 - mae: 0.3313

13/13 [==============================] - 0s 20ms/step - loss: 0.1729 - mae: 0.3313 - val_loss: 0.0123 - val_mae: 0.0830
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2270 - mae: 0.3648
13/13 [==============================] - 0s 3ms/step - loss: 0.1769 - mae: 0.3362

13/13 [==============================] - 0s 22ms/step - loss: 0.1769 - mae: 0.3362 - val_loss: 0.0123 - val_mae: 0.0832
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2092 - mae: 0.3598
13/13 [==============================] - 0s 3ms/step - loss: 0.1676 - mae: 0.3085

13/13 [==============================] - 0s 21ms/step - loss: 0.1676 - mae: 0.3085 - val_loss: 0.0121 - val_mae: 0.0817
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3424 - mae: 0.4737
13/13 [==============================] - 0s 3ms/step - loss: 0.1793 - mae: 0.3271

13/13 [==============================] - 0s 21ms/step - loss: 0.1793 - mae: 0.3271 - val_loss: 0.0118 - val_mae: 0.0814
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1415 - mae: 0.2601
13/13 [==============================] - 0s 3ms/step - loss: 0.1709 - mae: 0.3155

13/13 [==============================] - 0s 20ms/step - loss: 0.1709 - mae: 0.3155 - val_loss: 0.0113 - val_mae: 0.0803
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2371 - mae: 0.4037
13/13 [==============================] - 0s 3ms/step - loss: 0.1871 - mae: 0.3265

13/13 [==============================] - 0s 22ms/step - loss: 0.1871 - mae: 0.3265 - val_loss: 0.0116 - val_mae: 0.0821
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2683 - mae: 0.4095
13/13 [==============================] - 0s 3ms/step - loss: 0.1599 - mae: 0.3065

13/13 [==============================] - 0s 19ms/step - loss: 0.1599 - mae: 0.3065 - val_loss: 0.0119 - val_mae: 0.0823
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1236 - mae: 0.3047
13/13 [==============================] - 0s 3ms/step - loss: 0.1841 - mae: 0.3301

13/13 [==============================] - 0s 22ms/step - loss: 0.1841 - mae: 0.3301 - val_loss: 0.0121 - val_mae: 0.0833
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1145 - mae: 0.2726
13/13 [==============================] - 0s 3ms/step - loss: 0.1528 - mae: 0.2959

13/13 [==============================] - 0s 20ms/step - loss: 0.1528 - mae: 0.2959 - val_loss: 0.0125 - val_mae: 0.0845
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1863 - mae: 0.3311
13/13 [==============================] - 0s 3ms/step - loss: 0.1826 - mae: 0.3202

13/13 [==============================] - 0s 22ms/step - loss: 0.1826 - mae: 0.3202 - val_loss: 0.0121 - val_mae: 0.0832
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1394 - mae: 0.2724
13/13 [==============================] - 0s 3ms/step - loss: 0.1555 - mae: 0.3004

13/13 [==============================] - 0s 19ms/step - loss: 0.1555 - mae: 0.3004 - val_loss: 0.0111 - val_mae: 0.0801
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1443 - mae: 0.2857
13/13 [==============================] - 0s 3ms/step - loss: 0.1770 - mae: 0.3192

13/13 [==============================] - 0s 24ms/step - loss: 0.1770 - mae: 0.3192 - val_loss: 0.0099 - val_mae: 0.0759
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1696 - mae: 0.3034
13/13 [==============================] - 0s 3ms/step - loss: 0.1504 - mae: 0.2934

13/13 [==============================] - 0s 22ms/step - loss: 0.1504 - mae: 0.2934 - val_loss: 0.0104 - val_mae: 0.0775
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1372 - mae: 0.2956
13/13 [==============================] - 0s 3ms/step - loss: 0.1609 - mae: 0.3040

13/13 [==============================] - 0s 20ms/step - loss: 0.1609 - mae: 0.3040 - val_loss: 0.0110 - val_mae: 0.0796
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1475 - mae: 0.3152
13/13 [==============================] - 0s 3ms/step - loss: 0.1478 - mae: 0.2975

13/13 [==============================] - 0s 21ms/step - loss: 0.1478 - mae: 0.2975 - val_loss: 0.0100 - val_mae: 0.0757
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1687 - mae: 0.3302
13/13 [==============================] - 0s 3ms/step - loss: 0.1251 - mae: 0.2735

13/13 [==============================] - 0s 24ms/step - loss: 0.1251 - mae: 0.2735 - val_loss: 0.0101 - val_mae: 0.0763
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0855 - mae: 0.2072
13/13 [==============================] - 0s 3ms/step - loss: 0.1450 - mae: 0.2863

13/13 [==============================] - 0s 21ms/step - loss: 0.1450 - mae: 0.2863 - val_loss: 0.0099 - val_mae: 0.0755
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1576 - mae: 0.2958
13/13 [==============================] - 0s 3ms/step - loss: 0.1510 - mae: 0.2952

13/13 [==============================] - 0s 19ms/step - loss: 0.1510 - mae: 0.2952 - val_loss: 0.0111 - val_mae: 0.0807
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2684 - mae: 0.4162
13/13 [==============================] - 0s 3ms/step - loss: 0.1491 - mae: 0.2984

13/13 [==============================] - 0s 23ms/step - loss: 0.1491 - mae: 0.2984 - val_loss: 0.0115 - val_mae: 0.0820
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1113 - mae: 0.2647
13/13 [==============================] - 0s 3ms/step - loss: 0.1660 - mae: 0.2985

13/13 [==============================] - 0s 22ms/step - loss: 0.1660 - mae: 0.2985 - val_loss: 0.0119 - val_mae: 0.0828
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0650 - mae: 0.1942
13/13 [==============================] - 0s 3ms/step - loss: 0.1439 - mae: 0.2860

13/13 [==============================] - 0s 26ms/step - loss: 0.1439 - mae: 0.2860 - val_loss: 0.0124 - val_mae: 0.0847

Run completed: runs/2022-12-07T02-37-40Z

Training run 19/52 (flags = list(64, 10, 0.01, 30, 30, "sigmoid", "relu", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-38-17Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 11s - loss: 2.9631 - mae: 1.3509
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0052s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 0.8185 - mae: 0.6620

13/13 [==============================] - 2s 76ms/step - loss: 0.8185 - mae: 0.6620 - val_loss: 0.2159 - val_mae: 0.4295
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6293 - mae: 0.6704
12/13 [==========================>...] - ETA: 0s - loss: 0.3748 - mae: 0.5058
13/13 [==============================] - 0s 5ms/step - loss: 0.3753 - mae: 0.5068

13/13 [==============================] - 0s 25ms/step - loss: 0.3753 - mae: 0.5068 - val_loss: 0.1463 - val_mae: 0.3506
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3067 - mae: 0.4532
10/13 [======================>.......] - ETA: 0s - loss: 0.3098 - mae: 0.4427
13/13 [==============================] - 0s 5ms/step - loss: 0.2996 - mae: 0.4325

13/13 [==============================] - 0s 40ms/step - loss: 0.2996 - mae: 0.4325 - val_loss: 0.1731 - val_mae: 0.3858
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2889 - mae: 0.4555
13/13 [==============================] - 0s 4ms/step - loss: 0.2617 - mae: 0.4155

13/13 [==============================] - 0s 34ms/step - loss: 0.2617 - mae: 0.4155 - val_loss: 0.1419 - val_mae: 0.3471
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2208 - mae: 0.3497
11/13 [========================>.....] - ETA: 0s - loss: 0.2037 - mae: 0.3642
13/13 [==============================] - 0s 5ms/step - loss: 0.2032 - mae: 0.3650

13/13 [==============================] - 0s 39ms/step - loss: 0.2032 - mae: 0.3650 - val_loss: 0.1098 - val_mae: 0.3024
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1838 - mae: 0.3363
13/13 [==============================] - 0s 4ms/step - loss: 0.1891 - mae: 0.3526

13/13 [==============================] - 0s 22ms/step - loss: 0.1891 - mae: 0.3526 - val_loss: 0.0805 - val_mae: 0.2542
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2723 - mae: 0.4242
13/13 [==============================] - 0s 4ms/step - loss: 0.1616 - mae: 0.3273

13/13 [==============================] - 0s 29ms/step - loss: 0.1616 - mae: 0.3273 - val_loss: 0.0917 - val_mae: 0.2746
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1714 - mae: 0.3283
13/13 [==============================] - 0s 3ms/step - loss: 0.1537 - mae: 0.3194

13/13 [==============================] - 0s 21ms/step - loss: 0.1537 - mae: 0.3194 - val_loss: 0.0869 - val_mae: 0.2670
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0930 - mae: 0.2457
13/13 [==============================] - 0s 3ms/step - loss: 0.1507 - mae: 0.3161

13/13 [==============================] - 0s 27ms/step - loss: 0.1507 - mae: 0.3161 - val_loss: 0.0871 - val_mae: 0.2675
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1684 - mae: 0.3123
13/13 [==============================] - 0s 4ms/step - loss: 0.1329 - mae: 0.2944

13/13 [==============================] - 0s 30ms/step - loss: 0.1329 - mae: 0.2944 - val_loss: 0.0656 - val_mae: 0.2291
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1363 - mae: 0.2938
13/13 [==============================] - 0s 4ms/step - loss: 0.1169 - mae: 0.2736

13/13 [==============================] - 0s 26ms/step - loss: 0.1169 - mae: 0.2736 - val_loss: 0.0594 - val_mae: 0.2175
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0810 - mae: 0.2114
13/13 [==============================] - 0s 3ms/step - loss: 0.1037 - mae: 0.2574

13/13 [==============================] - 0s 24ms/step - loss: 0.1037 - mae: 0.2574 - val_loss: 0.0653 - val_mae: 0.2287
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1025 - mae: 0.2576
13/13 [==============================] - 0s 3ms/step - loss: 0.1113 - mae: 0.2704

13/13 [==============================] - 0s 25ms/step - loss: 0.1113 - mae: 0.2704 - val_loss: 0.0562 - val_mae: 0.2111
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0744 - mae: 0.2126
13/13 [==============================] - 0s 3ms/step - loss: 0.0947 - mae: 0.2435

13/13 [==============================] - 0s 24ms/step - loss: 0.0947 - mae: 0.2435 - val_loss: 0.0467 - val_mae: 0.1903
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0810 - mae: 0.2198
13/13 [==============================] - 0s 4ms/step - loss: 0.0870 - mae: 0.2356

13/13 [==============================] - 0s 38ms/step - loss: 0.0870 - mae: 0.2356 - val_loss: 0.0578 - val_mae: 0.2144
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1024 - mae: 0.2392
13/13 [==============================] - 0s 4ms/step - loss: 0.0935 - mae: 0.2400

13/13 [==============================] - 0s 35ms/step - loss: 0.0935 - mae: 0.2400 - val_loss: 0.0459 - val_mae: 0.1885
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0756 - mae: 0.2042
13/13 [==============================] - 0s 4ms/step - loss: 0.0807 - mae: 0.2307

13/13 [==============================] - 0s 38ms/step - loss: 0.0807 - mae: 0.2307 - val_loss: 0.0443 - val_mae: 0.1847
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0803 - mae: 0.2262
13/13 [==============================] - 0s 4ms/step - loss: 0.0600 - mae: 0.1949

13/13 [==============================] - 0s 22ms/step - loss: 0.0600 - mae: 0.1949 - val_loss: 0.0391 - val_mae: 0.1713
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0405 - mae: 0.1722
13/13 [==============================] - 0s 4ms/step - loss: 0.0721 - mae: 0.2236

13/13 [==============================] - 0s 29ms/step - loss: 0.0721 - mae: 0.2236 - val_loss: 0.0406 - val_mae: 0.1752
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0479 - mae: 0.1753
13/13 [==============================] - 0s 4ms/step - loss: 0.0643 - mae: 0.2036

13/13 [==============================] - 1s 67ms/step - loss: 0.0643 - mae: 0.2036 - val_loss: 0.0374 - val_mae: 0.1665
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0881 - mae: 0.2440
13/13 [==============================] - 0s 3ms/step - loss: 0.0559 - mae: 0.1886

13/13 [==============================] - 0s 39ms/step - loss: 0.0559 - mae: 0.1886 - val_loss: 0.0333 - val_mae: 0.1542
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0556 - mae: 0.2017
12/13 [==========================>...] - ETA: 0s - loss: 0.0613 - mae: 0.1967
13/13 [==============================] - 0s 5ms/step - loss: 0.0611 - mae: 0.1965

13/13 [==============================] - 0s 39ms/step - loss: 0.0611 - mae: 0.1965 - val_loss: 0.0319 - val_mae: 0.1496
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0777 - mae: 0.2239
13/13 [==============================] - 0s 3ms/step - loss: 0.0592 - mae: 0.1894

13/13 [==============================] - 0s 23ms/step - loss: 0.0592 - mae: 0.1894 - val_loss: 0.0360 - val_mae: 0.1621
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0716 - mae: 0.1976
13/13 [==============================] - 0s 3ms/step - loss: 0.0514 - mae: 0.1818

13/13 [==============================] - 0s 30ms/step - loss: 0.0514 - mae: 0.1818 - val_loss: 0.0325 - val_mae: 0.1510
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0552 - mae: 0.1917
13/13 [==============================] - 0s 3ms/step - loss: 0.0547 - mae: 0.1880

13/13 [==============================] - 0s 21ms/step - loss: 0.0547 - mae: 0.1880 - val_loss: 0.0273 - val_mae: 0.1338
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0601 - mae: 0.1903
13/13 [==============================] - 0s 4ms/step - loss: 0.0451 - mae: 0.1675

13/13 [==============================] - 0s 30ms/step - loss: 0.0451 - mae: 0.1675 - val_loss: 0.0268 - val_mae: 0.1321
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0592 - mae: 0.2060
13/13 [==============================] - 0s 3ms/step - loss: 0.0505 - mae: 0.1795

13/13 [==============================] - 0s 23ms/step - loss: 0.0505 - mae: 0.1795 - val_loss: 0.0300 - val_mae: 0.1425
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0435 - mae: 0.1577
13/13 [==============================] - 0s 4ms/step - loss: 0.0463 - mae: 0.1718

13/13 [==============================] - 0s 23ms/step - loss: 0.0463 - mae: 0.1718 - val_loss: 0.0291 - val_mae: 0.1394
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0300 - mae: 0.1526
13/13 [==============================] - 0s 3ms/step - loss: 0.0417 - mae: 0.1666

13/13 [==============================] - 0s 29ms/step - loss: 0.0417 - mae: 0.1666 - val_loss: 0.0276 - val_mae: 0.1339
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0276 - mae: 0.1358
13/13 [==============================] - 0s 3ms/step - loss: 0.0382 - mae: 0.1551

13/13 [==============================] - 0s 27ms/step - loss: 0.0382 - mae: 0.1551 - val_loss: 0.0296 - val_mae: 0.1409

Run completed: runs/2022-12-07T02-38-17Z

Training run 20/52 (flags = list(32, 32, 0.001, 30, 30, "relu", "sigmoid", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-39-04Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 5s - loss: 0.2548 - mae: 0.4437
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0010s vs `on_train_batch_end` time: 0.0030s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 0.3358 - mae: 0.4738

13/13 [==============================] - 1s 75ms/step - loss: 0.3358 - mae: 0.4738 - val_loss: 0.1733 - val_mae: 0.3376
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3118 - mae: 0.4930
13/13 [==============================] - 0s 2ms/step - loss: 0.2841 - mae: 0.4308

13/13 [==============================] - 0s 19ms/step - loss: 0.2841 - mae: 0.4308 - val_loss: 0.1341 - val_mae: 0.2894
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4055 - mae: 0.5101
13/13 [==============================] - 0s 4ms/step - loss: 0.2698 - mae: 0.4216

13/13 [==============================] - 0s 29ms/step - loss: 0.2698 - mae: 0.4216 - val_loss: 0.1097 - val_mae: 0.2576
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2363 - mae: 0.3715
13/13 [==============================] - 0s 4ms/step - loss: 0.2420 - mae: 0.3881

13/13 [==============================] - 0s 35ms/step - loss: 0.2420 - mae: 0.3881 - val_loss: 0.0934 - val_mae: 0.2380
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1723 - mae: 0.3403
13/13 [==============================] - 0s 2ms/step - loss: 0.2107 - mae: 0.3519

13/13 [==============================] - 0s 17ms/step - loss: 0.2107 - mae: 0.3519 - val_loss: 0.0814 - val_mae: 0.2236
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2020 - mae: 0.3541
13/13 [==============================] - 0s 3ms/step - loss: 0.2422 - mae: 0.3844

13/13 [==============================] - 0s 19ms/step - loss: 0.2422 - mae: 0.3844 - val_loss: 0.0734 - val_mae: 0.2129
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1979 - mae: 0.3276
13/13 [==============================] - 0s 3ms/step - loss: 0.2141 - mae: 0.3718

13/13 [==============================] - 0s 19ms/step - loss: 0.2141 - mae: 0.3718 - val_loss: 0.0685 - val_mae: 0.2075
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2106 - mae: 0.3359
13/13 [==============================] - 0s 3ms/step - loss: 0.2339 - mae: 0.3796

13/13 [==============================] - 0s 20ms/step - loss: 0.2339 - mae: 0.3796 - val_loss: 0.0650 - val_mae: 0.2030
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2441 - mae: 0.3974
12/13 [==========================>...] - ETA: 0s - loss: 0.1830 - mae: 0.3407
13/13 [==============================] - 0s 5ms/step - loss: 0.1822 - mae: 0.3397

13/13 [==============================] - 0s 32ms/step - loss: 0.1822 - mae: 0.3397 - val_loss: 0.0629 - val_mae: 0.2003
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2853 - mae: 0.4471
13/13 [==============================] - 0s 4ms/step - loss: 0.1968 - mae: 0.3576

13/13 [==============================] - 0s 31ms/step - loss: 0.1968 - mae: 0.3576 - val_loss: 0.0602 - val_mae: 0.1968
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2893 - mae: 0.4242
13/13 [==============================] - 0s 2ms/step - loss: 0.2034 - mae: 0.3612

13/13 [==============================] - 0s 17ms/step - loss: 0.2034 - mae: 0.3612 - val_loss: 0.0581 - val_mae: 0.1940
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2392 - mae: 0.3793
13/13 [==============================] - 0s 3ms/step - loss: 0.1735 - mae: 0.3229

13/13 [==============================] - 1s 51ms/step - loss: 0.1735 - mae: 0.3229 - val_loss: 0.0566 - val_mae: 0.1920
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1410 - mae: 0.3178
13/13 [==============================] - 0s 2ms/step - loss: 0.1787 - mae: 0.3418

13/13 [==============================] - 0s 17ms/step - loss: 0.1787 - mae: 0.3418 - val_loss: 0.0556 - val_mae: 0.1906
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1598 - mae: 0.3169
13/13 [==============================] - 0s 3ms/step - loss: 0.1935 - mae: 0.3482

13/13 [==============================] - 0s 20ms/step - loss: 0.1935 - mae: 0.3482 - val_loss: 0.0545 - val_mae: 0.1887
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1706 - mae: 0.3349
13/13 [==============================] - 0s 2ms/step - loss: 0.1991 - mae: 0.3577

13/13 [==============================] - 0s 19ms/step - loss: 0.1991 - mae: 0.3577 - val_loss: 0.0529 - val_mae: 0.1879
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1628 - mae: 0.3247
13/13 [==============================] - 0s 2ms/step - loss: 0.1727 - mae: 0.3275

13/13 [==============================] - 0s 18ms/step - loss: 0.1727 - mae: 0.3275 - val_loss: 0.0519 - val_mae: 0.1867
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1971 - mae: 0.3444
13/13 [==============================] - 0s 3ms/step - loss: 0.1791 - mae: 0.3315

13/13 [==============================] - 0s 18ms/step - loss: 0.1791 - mae: 0.3315 - val_loss: 0.0510 - val_mae: 0.1849
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2522 - mae: 0.3942
13/13 [==============================] - 0s 3ms/step - loss: 0.2003 - mae: 0.3548

13/13 [==============================] - 0s 21ms/step - loss: 0.2003 - mae: 0.3548 - val_loss: 0.0497 - val_mae: 0.1847
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1796 - mae: 0.3474
13/13 [==============================] - 0s 2ms/step - loss: 0.1956 - mae: 0.3444

13/13 [==============================] - 0s 18ms/step - loss: 0.1956 - mae: 0.3444 - val_loss: 0.0488 - val_mae: 0.1829
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1753 - mae: 0.3386
13/13 [==============================] - 0s 3ms/step - loss: 0.1885 - mae: 0.3431

13/13 [==============================] - 0s 18ms/step - loss: 0.1885 - mae: 0.3431 - val_loss: 0.0478 - val_mae: 0.1799
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2019 - mae: 0.3347
13/13 [==============================] - 0s 3ms/step - loss: 0.1717 - mae: 0.3276

13/13 [==============================] - 0s 19ms/step - loss: 0.1717 - mae: 0.3276 - val_loss: 0.0470 - val_mae: 0.1790
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1366 - mae: 0.3026
13/13 [==============================] - 0s 3ms/step - loss: 0.1772 - mae: 0.3390

13/13 [==============================] - 0s 19ms/step - loss: 0.1772 - mae: 0.3390 - val_loss: 0.0461 - val_mae: 0.1771
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1869 - mae: 0.3667
13/13 [==============================] - 0s 2ms/step - loss: 0.1661 - mae: 0.3233

13/13 [==============================] - 0s 18ms/step - loss: 0.1661 - mae: 0.3233 - val_loss: 0.0451 - val_mae: 0.1766
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2555 - mae: 0.3722
13/13 [==============================] - 0s 3ms/step - loss: 0.1675 - mae: 0.3172

13/13 [==============================] - 0s 19ms/step - loss: 0.1675 - mae: 0.3172 - val_loss: 0.0440 - val_mae: 0.1748
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1940 - mae: 0.3567
13/13 [==============================] - 0s 2ms/step - loss: 0.1696 - mae: 0.3209

13/13 [==============================] - 0s 19ms/step - loss: 0.1696 - mae: 0.3209 - val_loss: 0.0440 - val_mae: 0.1757
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1118 - mae: 0.2560
13/13 [==============================] - 0s 3ms/step - loss: 0.1446 - mae: 0.3002

13/13 [==============================] - 0s 19ms/step - loss: 0.1446 - mae: 0.3002 - val_loss: 0.0429 - val_mae: 0.1730
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1748 - mae: 0.3399
13/13 [==============================] - 0s 3ms/step - loss: 0.1682 - mae: 0.3273

13/13 [==============================] - 0s 18ms/step - loss: 0.1682 - mae: 0.3273 - val_loss: 0.0419 - val_mae: 0.1703
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2147 - mae: 0.3780
13/13 [==============================] - 0s 3ms/step - loss: 0.1760 - mae: 0.3280

13/13 [==============================] - 0s 18ms/step - loss: 0.1760 - mae: 0.3280 - val_loss: 0.0410 - val_mae: 0.1680
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1839 - mae: 0.3546
13/13 [==============================] - 0s 3ms/step - loss: 0.1560 - mae: 0.3161

13/13 [==============================] - 0s 20ms/step - loss: 0.1560 - mae: 0.3161 - val_loss: 0.0399 - val_mae: 0.1647
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0934 - mae: 0.2387
13/13 [==============================] - 0s 3ms/step - loss: 0.1501 - mae: 0.3077

13/13 [==============================] - 0s 18ms/step - loss: 0.1501 - mae: 0.3077 - val_loss: 0.0390 - val_mae: 0.1627

Run completed: runs/2022-12-07T02-39-04Z

Training run 21/52 (flags = list(32, 10, 0.001, 30, 50, "relu", "tanh", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-39-27Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 6s - loss: 1.8158 - mae: 1.2581
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0019s vs `on_train_batch_end` time: 0.0060s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 1.4340 - mae: 1.1079

13/13 [==============================] - 2s 78ms/step - loss: 1.4340 - mae: 1.1079 - val_loss: 1.2608 - val_mae: 1.0991
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 1.5332 - mae: 1.1536
13/13 [==============================] - 0s 2ms/step - loss: 1.0329 - mae: 0.9263

13/13 [==============================] - 0s 18ms/step - loss: 1.0329 - mae: 0.9263 - val_loss: 0.9158 - val_mae: 0.9278
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7329 - mae: 0.7469
13/13 [==============================] - 0s 4ms/step - loss: 0.8565 - mae: 0.8270

13/13 [==============================] - 0s 35ms/step - loss: 0.8565 - mae: 0.8270 - val_loss: 0.6756 - val_mae: 0.7864
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7769 - mae: 0.7521
13/13 [==============================] - 0s 3ms/step - loss: 0.6302 - mae: 0.6798

13/13 [==============================] - 0s 23ms/step - loss: 0.6302 - mae: 0.6798 - val_loss: 0.5205 - val_mae: 0.6800
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6887 - mae: 0.7110
13/13 [==============================] - 0s 3ms/step - loss: 0.4894 - mae: 0.5983

13/13 [==============================] - 0s 18ms/step - loss: 0.4894 - mae: 0.5983 - val_loss: 0.4036 - val_mae: 0.5863
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6009 - mae: 0.6416
13/13 [==============================] - 0s 3ms/step - loss: 0.4118 - mae: 0.5348

13/13 [==============================] - 0s 20ms/step - loss: 0.4118 - mae: 0.5348 - val_loss: 0.3104 - val_mae: 0.5005
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3151 - mae: 0.4698
13/13 [==============================] - 0s 2ms/step - loss: 0.3611 - mae: 0.4939

13/13 [==============================] - 0s 19ms/step - loss: 0.3611 - mae: 0.4939 - val_loss: 0.2376 - val_mae: 0.4244
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3682 - mae: 0.5207
13/13 [==============================] - 0s 2ms/step - loss: 0.3246 - mae: 0.4588

13/13 [==============================] - 0s 20ms/step - loss: 0.3246 - mae: 0.4588 - val_loss: 0.1909 - val_mae: 0.3702
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2830 - mae: 0.4281
13/13 [==============================] - 0s 4ms/step - loss: 0.2897 - mae: 0.4324

13/13 [==============================] - 0s 31ms/step - loss: 0.2897 - mae: 0.4324 - val_loss: 0.1613 - val_mae: 0.3329
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2282 - mae: 0.3944
13/13 [==============================] - 0s 4ms/step - loss: 0.2617 - mae: 0.4147

13/13 [==============================] - 0s 33ms/step - loss: 0.2617 - mae: 0.4147 - val_loss: 0.1290 - val_mae: 0.2878
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1859 - mae: 0.3405
13/13 [==============================] - 0s 2ms/step - loss: 0.2461 - mae: 0.3969

13/13 [==============================] - 0s 17ms/step - loss: 0.2461 - mae: 0.3969 - val_loss: 0.1090 - val_mae: 0.2590
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2354 - mae: 0.4055
13/13 [==============================] - 0s 3ms/step - loss: 0.2060 - mae: 0.3636

13/13 [==============================] - 0s 19ms/step - loss: 0.2060 - mae: 0.3636 - val_loss: 0.0965 - val_mae: 0.2402
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1575 - mae: 0.3308
13/13 [==============================] - 0s 2ms/step - loss: 0.2114 - mae: 0.3742

13/13 [==============================] - 0s 19ms/step - loss: 0.2114 - mae: 0.3742 - val_loss: 0.0851 - val_mae: 0.2241
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1701 - mae: 0.3340
13/13 [==============================] - 0s 3ms/step - loss: 0.2090 - mae: 0.3643

13/13 [==============================] - 0s 19ms/step - loss: 0.2090 - mae: 0.3643 - val_loss: 0.0760 - val_mae: 0.2093
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1643 - mae: 0.3291
13/13 [==============================] - 0s 2ms/step - loss: 0.2220 - mae: 0.3833

13/13 [==============================] - 0s 22ms/step - loss: 0.2220 - mae: 0.3833 - val_loss: 0.0681 - val_mae: 0.1968
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2778 - mae: 0.4401
13/13 [==============================] - 0s 3ms/step - loss: 0.1936 - mae: 0.3469

13/13 [==============================] - 0s 21ms/step - loss: 0.1936 - mae: 0.3469 - val_loss: 0.0631 - val_mae: 0.1899
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3351 - mae: 0.4700
13/13 [==============================] - 0s 3ms/step - loss: 0.1789 - mae: 0.3396

13/13 [==============================] - 0s 18ms/step - loss: 0.1789 - mae: 0.3396 - val_loss: 0.0582 - val_mae: 0.1834
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2001 - mae: 0.3476
13/13 [==============================] - 0s 3ms/step - loss: 0.1669 - mae: 0.3249

13/13 [==============================] - 0s 19ms/step - loss: 0.1669 - mae: 0.3249 - val_loss: 0.0562 - val_mae: 0.1798
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2107 - mae: 0.3640
13/13 [==============================] - 0s 3ms/step - loss: 0.1729 - mae: 0.3179

13/13 [==============================] - 0s 21ms/step - loss: 0.1729 - mae: 0.3179 - val_loss: 0.0529 - val_mae: 0.1747
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1089 - mae: 0.2691
13/13 [==============================] - 0s 2ms/step - loss: 0.1690 - mae: 0.3233

13/13 [==============================] - 0s 18ms/step - loss: 0.1690 - mae: 0.3233 - val_loss: 0.0493 - val_mae: 0.1693
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1012 - mae: 0.2555
13/13 [==============================] - 0s 3ms/step - loss: 0.1631 - mae: 0.3172

13/13 [==============================] - 0s 23ms/step - loss: 0.1631 - mae: 0.3172 - val_loss: 0.0459 - val_mae: 0.1644
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1286 - mae: 0.2994
13/13 [==============================] - 0s 3ms/step - loss: 0.1688 - mae: 0.3278

13/13 [==============================] - 0s 25ms/step - loss: 0.1688 - mae: 0.3278 - val_loss: 0.0439 - val_mae: 0.1601
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2304 - mae: 0.3994
13/13 [==============================] - 0s 3ms/step - loss: 0.1753 - mae: 0.3252

13/13 [==============================] - 0s 20ms/step - loss: 0.1753 - mae: 0.3252 - val_loss: 0.0414 - val_mae: 0.1561
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1741 - mae: 0.3212
13/13 [==============================] - 0s 2ms/step - loss: 0.1613 - mae: 0.3186

13/13 [==============================] - 0s 19ms/step - loss: 0.1613 - mae: 0.3186 - val_loss: 0.0395 - val_mae: 0.1524
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1129 - mae: 0.2674
13/13 [==============================] - 0s 3ms/step - loss: 0.1618 - mae: 0.3123

13/13 [==============================] - 0s 21ms/step - loss: 0.1618 - mae: 0.3123 - val_loss: 0.0385 - val_mae: 0.1500
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2533 - mae: 0.3936
13/13 [==============================] - 0s 4ms/step - loss: 0.1621 - mae: 0.3126

13/13 [==============================] - 0s 28ms/step - loss: 0.1621 - mae: 0.3126 - val_loss: 0.0362 - val_mae: 0.1453
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2425 - mae: 0.3711
13/13 [==============================] - 0s 4ms/step - loss: 0.1621 - mae: 0.3066

13/13 [==============================] - 0s 32ms/step - loss: 0.1621 - mae: 0.3066 - val_loss: 0.0346 - val_mae: 0.1421
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1383 - mae: 0.2976
13/13 [==============================] - 0s 4ms/step - loss: 0.1473 - mae: 0.2987

13/13 [==============================] - 0s 26ms/step - loss: 0.1473 - mae: 0.2987 - val_loss: 0.0337 - val_mae: 0.1402
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1667 - mae: 0.3200
13/13 [==============================] - 0s 4ms/step - loss: 0.1548 - mae: 0.2998

13/13 [==============================] - 0s 20ms/step - loss: 0.1548 - mae: 0.2998 - val_loss: 0.0323 - val_mae: 0.1373
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1277 - mae: 0.2649
13/13 [==============================] - 0s 3ms/step - loss: 0.1448 - mae: 0.2922

13/13 [==============================] - 0s 28ms/step - loss: 0.1448 - mae: 0.2922 - val_loss: 0.0317 - val_mae: 0.1360
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0536 - mae: 0.1952
13/13 [==============================] - 0s 3ms/step - loss: 0.1335 - mae: 0.2927

13/13 [==============================] - 0s 19ms/step - loss: 0.1335 - mae: 0.2927 - val_loss: 0.0309 - val_mae: 0.1341
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1266 - mae: 0.2713
13/13 [==============================] - 0s 3ms/step - loss: 0.1507 - mae: 0.3006

13/13 [==============================] - 0s 21ms/step - loss: 0.1507 - mae: 0.3006 - val_loss: 0.0299 - val_mae: 0.1318
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1166 - mae: 0.2911
13/13 [==============================] - 0s 3ms/step - loss: 0.1254 - mae: 0.2823

13/13 [==============================] - 0s 19ms/step - loss: 0.1254 - mae: 0.2823 - val_loss: 0.0296 - val_mae: 0.1309
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1118 - mae: 0.2656
13/13 [==============================] - 0s 3ms/step - loss: 0.1375 - mae: 0.2824

13/13 [==============================] - 0s 22ms/step - loss: 0.1375 - mae: 0.2824 - val_loss: 0.0290 - val_mae: 0.1296
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1227 - mae: 0.2782
13/13 [==============================] - 0s 2ms/step - loss: 0.1214 - mae: 0.2793

13/13 [==============================] - 0s 18ms/step - loss: 0.1214 - mae: 0.2793 - val_loss: 0.0281 - val_mae: 0.1273
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1329 - mae: 0.3090
13/13 [==============================] - 0s 2ms/step - loss: 0.1221 - mae: 0.2770

13/13 [==============================] - 0s 20ms/step - loss: 0.1221 - mae: 0.2770 - val_loss: 0.0276 - val_mae: 0.1259
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1242 - mae: 0.2660
13/13 [==============================] - 0s 2ms/step - loss: 0.1186 - mae: 0.2695

13/13 [==============================] - 0s 18ms/step - loss: 0.1186 - mae: 0.2695 - val_loss: 0.0271 - val_mae: 0.1247
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0794 - mae: 0.2195
13/13 [==============================] - 0s 3ms/step - loss: 0.1203 - mae: 0.2774

13/13 [==============================] - 0s 21ms/step - loss: 0.1203 - mae: 0.2774 - val_loss: 0.0268 - val_mae: 0.1237
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1677 - mae: 0.3249
13/13 [==============================] - 0s 2ms/step - loss: 0.1278 - mae: 0.2835

13/13 [==============================] - 0s 19ms/step - loss: 0.1278 - mae: 0.2835 - val_loss: 0.0266 - val_mae: 0.1229
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1414 - mae: 0.2856
13/13 [==============================] - 0s 3ms/step - loss: 0.1363 - mae: 0.2854

13/13 [==============================] - 0s 19ms/step - loss: 0.1363 - mae: 0.2854 - val_loss: 0.0263 - val_mae: 0.1220
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1610 - mae: 0.3260
13/13 [==============================] - 0s 3ms/step - loss: 0.1337 - mae: 0.2897

13/13 [==============================] - 0s 20ms/step - loss: 0.1337 - mae: 0.2897 - val_loss: 0.0258 - val_mae: 0.1208
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1256 - mae: 0.2714
13/13 [==============================] - 0s 2ms/step - loss: 0.1253 - mae: 0.2811

13/13 [==============================] - 0s 20ms/step - loss: 0.1253 - mae: 0.2811 - val_loss: 0.0251 - val_mae: 0.1197
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1347 - mae: 0.2911
13/13 [==============================] - 0s 3ms/step - loss: 0.1130 - mae: 0.2687

13/13 [==============================] - 0s 19ms/step - loss: 0.1130 - mae: 0.2687 - val_loss: 0.0248 - val_mae: 0.1188
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1743 - mae: 0.3470
13/13 [==============================] - 0s 3ms/step - loss: 0.1228 - mae: 0.2789

13/13 [==============================] - 0s 19ms/step - loss: 0.1228 - mae: 0.2789 - val_loss: 0.0244 - val_mae: 0.1178
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1014 - mae: 0.2618
13/13 [==============================] - 0s 3ms/step - loss: 0.1082 - mae: 0.2570

13/13 [==============================] - 0s 20ms/step - loss: 0.1082 - mae: 0.2570 - val_loss: 0.0238 - val_mae: 0.1164
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0648 - mae: 0.1987
13/13 [==============================] - 0s 2ms/step - loss: 0.1235 - mae: 0.2664

13/13 [==============================] - 0s 19ms/step - loss: 0.1235 - mae: 0.2664 - val_loss: 0.0234 - val_mae: 0.1151
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1248 - mae: 0.2903
13/13 [==============================] - 0s 3ms/step - loss: 0.1105 - mae: 0.2686

13/13 [==============================] - 0s 18ms/step - loss: 0.1105 - mae: 0.2686 - val_loss: 0.0232 - val_mae: 0.1143
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0584 - mae: 0.2033
13/13 [==============================] - 0s 3ms/step - loss: 0.1082 - mae: 0.2572

13/13 [==============================] - 0s 18ms/step - loss: 0.1082 - mae: 0.2572 - val_loss: 0.0229 - val_mae: 0.1136
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0950 - mae: 0.2335
13/13 [==============================] - 0s 2ms/step - loss: 0.1110 - mae: 0.2575

13/13 [==============================] - 0s 20ms/step - loss: 0.1110 - mae: 0.2575 - val_loss: 0.0226 - val_mae: 0.1129
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1066 - mae: 0.2670
13/13 [==============================] - 0s 2ms/step - loss: 0.1084 - mae: 0.2626

13/13 [==============================] - 0s 20ms/step - loss: 0.1084 - mae: 0.2626 - val_loss: 0.0223 - val_mae: 0.1119

Run completed: runs/2022-12-07T02-39-27Z

Training run 22/52 (flags = list(32, 10, 0.01, 50, 30, "sigmoid", "tanh", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-39-54Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 3s - loss: 0.7150 - mae: 0.7374
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0017s vs `on_train_batch_end` time: 0.0040s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.5622 - mae: 0.6081

8/8 [==============================] - 1s 108ms/step - loss: 0.5622 - mae: 0.6081 - val_loss: 0.0164 - val_mae: 0.1101
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3969 - mae: 0.5328
8/8 [==============================] - 0s 2ms/step - loss: 0.4899 - mae: 0.5757

8/8 [==============================] - 0s 31ms/step - loss: 0.4899 - mae: 0.5757 - val_loss: 0.0073 - val_mae: 0.0660
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3176 - mae: 0.4611
8/8 [==============================] - 0s 6ms/step - loss: 0.3521 - mae: 0.4786

8/8 [==============================] - 0s 64ms/step - loss: 0.3521 - mae: 0.4786 - val_loss: 0.0166 - val_mae: 0.1120
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.4263 - mae: 0.5445
8/8 [==============================] - 0s 3ms/step - loss: 0.2875 - mae: 0.4366

8/8 [==============================] - 0s 35ms/step - loss: 0.2875 - mae: 0.4366 - val_loss: 0.0072 - val_mae: 0.0649
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3120 - mae: 0.4475
8/8 [==============================] - 0s 2ms/step - loss: 0.2809 - mae: 0.4353

8/8 [==============================] - 0s 33ms/step - loss: 0.2809 - mae: 0.4353 - val_loss: 0.0088 - val_mae: 0.0742
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3112 - mae: 0.4111
8/8 [==============================] - 0s 4ms/step - loss: 0.2395 - mae: 0.3908

8/8 [==============================] - 0s 62ms/step - loss: 0.2395 - mae: 0.3908 - val_loss: 0.0074 - val_mae: 0.0662
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2645 - mae: 0.3938
8/8 [==============================] - 0s 4ms/step - loss: 0.2511 - mae: 0.4018

8/8 [==============================] - 0s 45ms/step - loss: 0.2511 - mae: 0.4018 - val_loss: 0.0106 - val_mae: 0.0844
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2573 - mae: 0.4161
8/8 [==============================] - 0s 3ms/step - loss: 0.2106 - mae: 0.3646

8/8 [==============================] - 0s 30ms/step - loss: 0.2106 - mae: 0.3646 - val_loss: 0.0122 - val_mae: 0.0925
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2477 - mae: 0.4051
8/8 [==============================] - 0s 3ms/step - loss: 0.2056 - mae: 0.3724

8/8 [==============================] - 0s 31ms/step - loss: 0.2056 - mae: 0.3724 - val_loss: 0.0079 - val_mae: 0.0675
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2517 - mae: 0.3960
8/8 [==============================] - 0s 3ms/step - loss: 0.1980 - mae: 0.3510

8/8 [==============================] - 0s 33ms/step - loss: 0.1980 - mae: 0.3510 - val_loss: 0.0090 - val_mae: 0.0732
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2409 - mae: 0.4197
8/8 [==============================] - 0s 3ms/step - loss: 0.1784 - mae: 0.3311

8/8 [==============================] - 0s 31ms/step - loss: 0.1784 - mae: 0.3311 - val_loss: 0.0081 - val_mae: 0.0678
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1393 - mae: 0.3084
8/8 [==============================] - 0s 3ms/step - loss: 0.1725 - mae: 0.3299

8/8 [==============================] - 0s 33ms/step - loss: 0.1725 - mae: 0.3299 - val_loss: 0.0091 - val_mae: 0.0728
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1177 - mae: 0.2741
8/8 [==============================] - 0s 3ms/step - loss: 0.1547 - mae: 0.3160

8/8 [==============================] - 0s 32ms/step - loss: 0.1547 - mae: 0.3160 - val_loss: 0.0086 - val_mae: 0.0698
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1444 - mae: 0.2933
8/8 [==============================] - 0s 3ms/step - loss: 0.1488 - mae: 0.3118

8/8 [==============================] - 0s 36ms/step - loss: 0.1488 - mae: 0.3118 - val_loss: 0.0111 - val_mae: 0.0855
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1103 - mae: 0.2445
8/8 [==============================] - 0s 3ms/step - loss: 0.1385 - mae: 0.2910

8/8 [==============================] - 0s 28ms/step - loss: 0.1385 - mae: 0.2910 - val_loss: 0.0109 - val_mae: 0.0842
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1616 - mae: 0.3317
8/8 [==============================] - 0s 3ms/step - loss: 0.1380 - mae: 0.2903

8/8 [==============================] - 0s 33ms/step - loss: 0.1380 - mae: 0.2903 - val_loss: 0.0090 - val_mae: 0.0720
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1315 - mae: 0.2686
8/8 [==============================] - 0s 3ms/step - loss: 0.1276 - mae: 0.2884

8/8 [==============================] - 0s 33ms/step - loss: 0.1276 - mae: 0.2884 - val_loss: 0.0100 - val_mae: 0.0782
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1055 - mae: 0.2609
8/8 [==============================] - 0s 3ms/step - loss: 0.1204 - mae: 0.2755

8/8 [==============================] - 0s 29ms/step - loss: 0.1204 - mae: 0.2755 - val_loss: 0.0124 - val_mae: 0.0922
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1695 - mae: 0.3347
8/8 [==============================] - 0s 3ms/step - loss: 0.1297 - mae: 0.2807

8/8 [==============================] - 0s 35ms/step - loss: 0.1297 - mae: 0.2807 - val_loss: 0.0121 - val_mae: 0.0902
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1289 - mae: 0.2898
8/8 [==============================] - 0s 3ms/step - loss: 0.1212 - mae: 0.2802

8/8 [==============================] - 0s 31ms/step - loss: 0.1212 - mae: 0.2802 - val_loss: 0.0123 - val_mae: 0.0918
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0910 - mae: 0.2379
8/8 [==============================] - 0s 3ms/step - loss: 0.1114 - mae: 0.2612

8/8 [==============================] - 0s 31ms/step - loss: 0.1114 - mae: 0.2612 - val_loss: 0.0122 - val_mae: 0.0912
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0916 - mae: 0.2536
8/8 [==============================] - 0s 3ms/step - loss: 0.0976 - mae: 0.2517

8/8 [==============================] - 0s 33ms/step - loss: 0.0976 - mae: 0.2517 - val_loss: 0.0087 - val_mae: 0.0686
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0837 - mae: 0.2270
8/8 [==============================] - 0s 3ms/step - loss: 0.0847 - mae: 0.2339

8/8 [==============================] - 0s 33ms/step - loss: 0.0847 - mae: 0.2339 - val_loss: 0.0087 - val_mae: 0.0686
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0998 - mae: 0.2566
8/8 [==============================] - 0s 3ms/step - loss: 0.0895 - mae: 0.2462

8/8 [==============================] - 0s 31ms/step - loss: 0.0895 - mae: 0.2462 - val_loss: 0.0091 - val_mae: 0.0715
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0851 - mae: 0.2101
8/8 [==============================] - 0s 3ms/step - loss: 0.0950 - mae: 0.2449

8/8 [==============================] - 0s 33ms/step - loss: 0.0950 - mae: 0.2449 - val_loss: 0.0094 - val_mae: 0.0738
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1230 - mae: 0.2564
8/8 [==============================] - 0s 3ms/step - loss: 0.0858 - mae: 0.2310

8/8 [==============================] - 0s 31ms/step - loss: 0.0858 - mae: 0.2310 - val_loss: 0.0097 - val_mae: 0.0762
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0695 - mae: 0.2053
8/8 [==============================] - 0s 3ms/step - loss: 0.0714 - mae: 0.2119

8/8 [==============================] - 0s 34ms/step - loss: 0.0714 - mae: 0.2119 - val_loss: 0.0106 - val_mae: 0.0814
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0826 - mae: 0.2310
8/8 [==============================] - 0s 3ms/step - loss: 0.0692 - mae: 0.2108

8/8 [==============================] - 0s 31ms/step - loss: 0.0692 - mae: 0.2108 - val_loss: 0.0097 - val_mae: 0.0756
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0733 - mae: 0.2207
8/8 [==============================] - 0s 3ms/step - loss: 0.0700 - mae: 0.2158

8/8 [==============================] - 0s 31ms/step - loss: 0.0700 - mae: 0.2158 - val_loss: 0.0107 - val_mae: 0.0811
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0485 - mae: 0.1736
8/8 [==============================] - 0s 3ms/step - loss: 0.0733 - mae: 0.2100

8/8 [==============================] - 0s 36ms/step - loss: 0.0733 - mae: 0.2100 - val_loss: 0.0109 - val_mae: 0.0819

Run completed: runs/2022-12-07T02-39-54Z

Training run 23/52 (flags = list(64, 10, 0.001, 30, 50, "relu", "tanh", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-40-15Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 5s - loss: 0.9571 - mae: 0.8028
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0011s vs `on_train_batch_end` time: 0.0045s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 1.4171 - mae: 0.9394

13/13 [==============================] - 1s 64ms/step - loss: 1.4171 - mae: 0.9394 - val_loss: 0.3436 - val_mae: 0.5521
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 1.6723 - mae: 0.9385
13/13 [==============================] - 0s 2ms/step - loss: 1.1411 - mae: 0.8594

13/13 [==============================] - 0s 23ms/step - loss: 1.1411 - mae: 0.8594 - val_loss: 0.2435 - val_mae: 0.4588
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.9973 - mae: 0.8795
12/13 [==========================>...] - ETA: 0s - loss: 1.0048 - mae: 0.7945
13/13 [==============================] - 0s 5ms/step - loss: 1.0028 - mae: 0.7943

13/13 [==============================] - 0s 37ms/step - loss: 1.0028 - mae: 0.7943 - val_loss: 0.1864 - val_mae: 0.3938
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7207 - mae: 0.7165
13/13 [==============================] - 0s 2ms/step - loss: 1.0473 - mae: 0.8098

13/13 [==============================] - 0s 20ms/step - loss: 1.0473 - mae: 0.8098 - val_loss: 0.1417 - val_mae: 0.3360
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7726 - mae: 0.7513
13/13 [==============================] - 0s 3ms/step - loss: 0.8634 - mae: 0.7508

13/13 [==============================] - 0s 29ms/step - loss: 0.8634 - mae: 0.7508 - val_loss: 0.1018 - val_mae: 0.2731
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7638 - mae: 0.6866
13/13 [==============================] - 0s 4ms/step - loss: 0.8624 - mae: 0.7450

13/13 [==============================] - 0s 36ms/step - loss: 0.8624 - mae: 0.7450 - val_loss: 0.0786 - val_mae: 0.2278
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.8664 - mae: 0.7413
13/13 [==============================] - 0s 3ms/step - loss: 0.8944 - mae: 0.7412

13/13 [==============================] - 0s 17ms/step - loss: 0.8944 - mae: 0.7412 - val_loss: 0.0712 - val_mae: 0.2176
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5314 - mae: 0.6091
13/13 [==============================] - 0s 3ms/step - loss: 0.8994 - mae: 0.7553

13/13 [==============================] - 0s 19ms/step - loss: 0.8994 - mae: 0.7553 - val_loss: 0.0573 - val_mae: 0.1874
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6616 - mae: 0.6700
13/13 [==============================] - 0s 3ms/step - loss: 0.7684 - mae: 0.6775

13/13 [==============================] - 0s 20ms/step - loss: 0.7684 - mae: 0.6775 - val_loss: 0.0485 - val_mae: 0.1679
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7868 - mae: 0.7195
13/13 [==============================] - 0s 3ms/step - loss: 0.7307 - mae: 0.6750

13/13 [==============================] - 0s 20ms/step - loss: 0.7307 - mae: 0.6750 - val_loss: 0.0430 - val_mae: 0.1537
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4662 - mae: 0.5275
13/13 [==============================] - 0s 3ms/step - loss: 0.6465 - mae: 0.6347

13/13 [==============================] - 0s 19ms/step - loss: 0.6465 - mae: 0.6347 - val_loss: 0.0417 - val_mae: 0.1510
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7137 - mae: 0.6384
13/13 [==============================] - 0s 2ms/step - loss: 0.6623 - mae: 0.6448

13/13 [==============================] - 0s 21ms/step - loss: 0.6623 - mae: 0.6448 - val_loss: 0.0404 - val_mae: 0.1480
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0457 - mae: 0.7898
13/13 [==============================] - 0s 3ms/step - loss: 0.7964 - mae: 0.6858

13/13 [==============================] - 0s 19ms/step - loss: 0.7964 - mae: 0.6858 - val_loss: 0.0366 - val_mae: 0.1398
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.9530 - mae: 0.7535
13/13 [==============================] - 0s 3ms/step - loss: 0.6822 - mae: 0.6461

13/13 [==============================] - 0s 19ms/step - loss: 0.6822 - mae: 0.6461 - val_loss: 0.0370 - val_mae: 0.1410
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5719 - mae: 0.6079
13/13 [==============================] - 0s 3ms/step - loss: 0.5831 - mae: 0.6036

13/13 [==============================] - 0s 20ms/step - loss: 0.5831 - mae: 0.6036 - val_loss: 0.0343 - val_mae: 0.1344
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7628 - mae: 0.7217
13/13 [==============================] - 0s 3ms/step - loss: 0.6050 - mae: 0.6155

13/13 [==============================] - 0s 20ms/step - loss: 0.6050 - mae: 0.6155 - val_loss: 0.0326 - val_mae: 0.1297
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7548 - mae: 0.6913
13/13 [==============================] - 0s 3ms/step - loss: 0.6367 - mae: 0.6124

13/13 [==============================] - 0s 18ms/step - loss: 0.6367 - mae: 0.6124 - val_loss: 0.0315 - val_mae: 0.1269
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4112 - mae: 0.5260
13/13 [==============================] - 0s 3ms/step - loss: 0.5316 - mae: 0.5797

13/13 [==============================] - 0s 20ms/step - loss: 0.5316 - mae: 0.5797 - val_loss: 0.0298 - val_mae: 0.1229
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5499 - mae: 0.6052
13/13 [==============================] - 0s 3ms/step - loss: 0.6131 - mae: 0.6023

13/13 [==============================] - 0s 19ms/step - loss: 0.6131 - mae: 0.6023 - val_loss: 0.0287 - val_mae: 0.1202
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6271 - mae: 0.6522
13/13 [==============================] - 0s 3ms/step - loss: 0.5923 - mae: 0.6118

13/13 [==============================] - 0s 19ms/step - loss: 0.5923 - mae: 0.6118 - val_loss: 0.0285 - val_mae: 0.1188
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6671 - mae: 0.6943
13/13 [==============================] - 0s 3ms/step - loss: 0.5081 - mae: 0.5857

13/13 [==============================] - 0s 19ms/step - loss: 0.5081 - mae: 0.5857 - val_loss: 0.0278 - val_mae: 0.1170
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3449 - mae: 0.4247
13/13 [==============================] - 0s 3ms/step - loss: 0.4129 - mae: 0.5038

13/13 [==============================] - 0s 18ms/step - loss: 0.4129 - mae: 0.5038 - val_loss: 0.0271 - val_mae: 0.1154
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3452 - mae: 0.4888
13/13 [==============================] - 0s 2ms/step - loss: 0.5270 - mae: 0.5737

13/13 [==============================] - 0s 21ms/step - loss: 0.5270 - mae: 0.5737 - val_loss: 0.0254 - val_mae: 0.1115
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3914 - mae: 0.5081
13/13 [==============================] - 0s 3ms/step - loss: 0.4353 - mae: 0.5236

13/13 [==============================] - 0s 19ms/step - loss: 0.4353 - mae: 0.5236 - val_loss: 0.0260 - val_mae: 0.1136
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6402 - mae: 0.6377
13/13 [==============================] - 0s 3ms/step - loss: 0.5247 - mae: 0.5882

13/13 [==============================] - 0s 21ms/step - loss: 0.5247 - mae: 0.5882 - val_loss: 0.0250 - val_mae: 0.1106
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4851 - mae: 0.5417
13/13 [==============================] - 0s 3ms/step - loss: 0.4760 - mae: 0.5380

13/13 [==============================] - 0s 19ms/step - loss: 0.4760 - mae: 0.5380 - val_loss: 0.0242 - val_mae: 0.1083
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2826 - mae: 0.4333
13/13 [==============================] - 0s 3ms/step - loss: 0.4427 - mae: 0.5212

13/13 [==============================] - 0s 19ms/step - loss: 0.4427 - mae: 0.5212 - val_loss: 0.0232 - val_mae: 0.1062
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3317 - mae: 0.4865
13/13 [==============================] - 0s 2ms/step - loss: 0.4781 - mae: 0.5460

13/13 [==============================] - 0s 19ms/step - loss: 0.4781 - mae: 0.5460 - val_loss: 0.0225 - val_mae: 0.1049
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6221 - mae: 0.6483
13/13 [==============================] - 0s 3ms/step - loss: 0.4584 - mae: 0.5383

13/13 [==============================] - 0s 21ms/step - loss: 0.4584 - mae: 0.5383 - val_loss: 0.0221 - val_mae: 0.1038
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3860 - mae: 0.4881
13/13 [==============================] - 0s 3ms/step - loss: 0.3482 - mae: 0.4784

13/13 [==============================] - 0s 19ms/step - loss: 0.3482 - mae: 0.4784 - val_loss: 0.0221 - val_mae: 0.1033
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3862 - mae: 0.4697
13/13 [==============================] - 0s 3ms/step - loss: 0.4877 - mae: 0.5505

13/13 [==============================] - 0s 20ms/step - loss: 0.4877 - mae: 0.5505 - val_loss: 0.0231 - val_mae: 0.1082
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4509 - mae: 0.5677
13/13 [==============================] - 0s 2ms/step - loss: 0.4674 - mae: 0.5370

13/13 [==============================] - 0s 20ms/step - loss: 0.4674 - mae: 0.5370 - val_loss: 0.0224 - val_mae: 0.1058
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4187 - mae: 0.5477
13/13 [==============================] - 0s 3ms/step - loss: 0.4312 - mae: 0.5220

13/13 [==============================] - 0s 20ms/step - loss: 0.4312 - mae: 0.5220 - val_loss: 0.0223 - val_mae: 0.1070
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4498 - mae: 0.5226
13/13 [==============================] - 0s 3ms/step - loss: 0.3761 - mae: 0.4797

13/13 [==============================] - 0s 18ms/step - loss: 0.3761 - mae: 0.4797 - val_loss: 0.0216 - val_mae: 0.1039
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3512 - mae: 0.4845
13/13 [==============================] - 0s 3ms/step - loss: 0.3617 - mae: 0.4742

13/13 [==============================] - 0s 20ms/step - loss: 0.3617 - mae: 0.4742 - val_loss: 0.0214 - val_mae: 0.1035
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3787 - mae: 0.4793
13/13 [==============================] - 0s 3ms/step - loss: 0.3867 - mae: 0.5125

13/13 [==============================] - 0s 20ms/step - loss: 0.3867 - mae: 0.5125 - val_loss: 0.0213 - val_mae: 0.1037
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4157 - mae: 0.5145
13/13 [==============================] - 0s 3ms/step - loss: 0.3289 - mae: 0.4580

13/13 [==============================] - 0s 19ms/step - loss: 0.3289 - mae: 0.4580 - val_loss: 0.0210 - val_mae: 0.1031
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3526 - mae: 0.5008
13/13 [==============================] - 0s 3ms/step - loss: 0.3775 - mae: 0.4855

13/13 [==============================] - 0s 20ms/step - loss: 0.3775 - mae: 0.4855 - val_loss: 0.0208 - val_mae: 0.1026
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3515 - mae: 0.4771
13/13 [==============================] - 0s 3ms/step - loss: 0.3343 - mae: 0.4656

13/13 [==============================] - 1s 50ms/step - loss: 0.3343 - mae: 0.4656 - val_loss: 0.0199 - val_mae: 0.0984
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3558 - mae: 0.4555
13/13 [==============================] - 0s 3ms/step - loss: 0.3428 - mae: 0.4622

13/13 [==============================] - 0s 19ms/step - loss: 0.3428 - mae: 0.4622 - val_loss: 0.0198 - val_mae: 0.0985
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3897 - mae: 0.4922
13/13 [==============================] - 0s 3ms/step - loss: 0.3618 - mae: 0.4672

13/13 [==============================] - 0s 19ms/step - loss: 0.3618 - mae: 0.4672 - val_loss: 0.0194 - val_mae: 0.0971
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3472 - mae: 0.5299
13/13 [==============================] - 0s 3ms/step - loss: 0.3236 - mae: 0.4586

13/13 [==============================] - 0s 18ms/step - loss: 0.3236 - mae: 0.4586 - val_loss: 0.0190 - val_mae: 0.0957
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5079 - mae: 0.5751
13/13 [==============================] - 0s 3ms/step - loss: 0.3459 - mae: 0.4557

13/13 [==============================] - 0s 19ms/step - loss: 0.3459 - mae: 0.4557 - val_loss: 0.0184 - val_mae: 0.0923
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2874 - mae: 0.4304
13/13 [==============================] - 0s 3ms/step - loss: 0.2867 - mae: 0.4393

13/13 [==============================] - 0s 20ms/step - loss: 0.2867 - mae: 0.4393 - val_loss: 0.0184 - val_mae: 0.0923
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2814 - mae: 0.4151
13/13 [==============================] - 0s 3ms/step - loss: 0.2958 - mae: 0.4261

13/13 [==============================] - 0s 19ms/step - loss: 0.2958 - mae: 0.4261 - val_loss: 0.0182 - val_mae: 0.0923
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3223 - mae: 0.4761
13/13 [==============================] - 0s 2ms/step - loss: 0.2853 - mae: 0.4179

13/13 [==============================] - 0s 18ms/step - loss: 0.2853 - mae: 0.4179 - val_loss: 0.0183 - val_mae: 0.0928
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2748 - mae: 0.3946
13/13 [==============================] - 0s 3ms/step - loss: 0.2928 - mae: 0.4325

13/13 [==============================] - 0s 19ms/step - loss: 0.2928 - mae: 0.4325 - val_loss: 0.0187 - val_mae: 0.0956
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3651 - mae: 0.4751
13/13 [==============================] - 0s 3ms/step - loss: 0.3262 - mae: 0.4624

13/13 [==============================] - 0s 21ms/step - loss: 0.3262 - mae: 0.4624 - val_loss: 0.0186 - val_mae: 0.0960
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2283 - mae: 0.3471
13/13 [==============================] - 0s 4ms/step - loss: 0.2812 - mae: 0.4170

13/13 [==============================] - 0s 22ms/step - loss: 0.2812 - mae: 0.4170 - val_loss: 0.0183 - val_mae: 0.0956
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4103 - mae: 0.5219
13/13 [==============================] - 0s 2ms/step - loss: 0.3391 - mae: 0.4572

13/13 [==============================] - 0s 19ms/step - loss: 0.3391 - mae: 0.4572 - val_loss: 0.0180 - val_mae: 0.0943

Run completed: runs/2022-12-07T02-40-15Z

Training run 24/52 (flags = list(64, 50, 0.01, 30, 50, "sigmoid", "sigmoid", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-40-43Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 11s - loss: 0.7552 - mae: 0.7574
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0034s vs `on_train_batch_end` time: 0.0053s). Check your callbacks.

13/13 [==============================] - 1s 5ms/step - loss: 0.2983 - mae: 0.4317

13/13 [==============================] - 2s 82ms/step - loss: 0.2983 - mae: 0.4317 - val_loss: 0.0330 - val_mae: 0.1449
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2807 - mae: 0.4237
13/13 [==============================] - 0s 3ms/step - loss: 0.2417 - mae: 0.3952

13/13 [==============================] - 0s 21ms/step - loss: 0.2417 - mae: 0.3952 - val_loss: 0.0291 - val_mae: 0.1379
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2685 - mae: 0.4052
13/13 [==============================] - 0s 3ms/step - loss: 0.1859 - mae: 0.3428

13/13 [==============================] - 0s 26ms/step - loss: 0.1859 - mae: 0.3428 - val_loss: 0.0362 - val_mae: 0.1525
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3039 - mae: 0.4306
13/13 [==============================] - 0s 4ms/step - loss: 0.1770 - mae: 0.3283

13/13 [==============================] - 0s 27ms/step - loss: 0.1770 - mae: 0.3283 - val_loss: 0.0397 - val_mae: 0.1644
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1969 - mae: 0.3870
11/13 [========================>.....] - ETA: 0s - loss: 0.1780 - mae: 0.3404
13/13 [==============================] - 0s 5ms/step - loss: 0.1754 - mae: 0.3384

13/13 [==============================] - 0s 39ms/step - loss: 0.1754 - mae: 0.3384 - val_loss: 0.0259 - val_mae: 0.1304
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1563 - mae: 0.3128
13/13 [==============================] - 0s 4ms/step - loss: 0.1823 - mae: 0.3453

13/13 [==============================] - 0s 36ms/step - loss: 0.1823 - mae: 0.3453 - val_loss: 0.0242 - val_mae: 0.1249
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1676 - mae: 0.3471
10/13 [======================>.......] - ETA: 0s - loss: 0.1610 - mae: 0.3212
13/13 [==============================] - 0s 6ms/step - loss: 0.1559 - mae: 0.3163

13/13 [==============================] - 0s 40ms/step - loss: 0.1559 - mae: 0.3163 - val_loss: 0.0241 - val_mae: 0.1228
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1882 - mae: 0.3366
13/13 [==============================] - 0s 4ms/step - loss: 0.1443 - mae: 0.2978

13/13 [==============================] - 0s 28ms/step - loss: 0.1443 - mae: 0.2978 - val_loss: 0.0241 - val_mae: 0.1233
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1080 - mae: 0.2618
13/13 [==============================] - 0s 4ms/step - loss: 0.1444 - mae: 0.2992

13/13 [==============================] - 0s 30ms/step - loss: 0.1444 - mae: 0.2992 - val_loss: 0.0283 - val_mae: 0.1355
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1522 - mae: 0.3067
12/13 [==========================>...] - ETA: 0s - loss: 0.1376 - mae: 0.2985
13/13 [==============================] - 0s 5ms/step - loss: 0.1373 - mae: 0.2985

13/13 [==============================] - 0s 37ms/step - loss: 0.1373 - mae: 0.2985 - val_loss: 0.0218 - val_mae: 0.1184
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1252 - mae: 0.2837
13/13 [==============================] - 0s 3ms/step - loss: 0.1203 - mae: 0.2784

13/13 [==============================] - 0s 25ms/step - loss: 0.1203 - mae: 0.2784 - val_loss: 0.0299 - val_mae: 0.1431
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1772 - mae: 0.3390
12/13 [==========================>...] - ETA: 0s - loss: 0.1310 - mae: 0.2826
13/13 [==============================] - 0s 5ms/step - loss: 0.1313 - mae: 0.2833

13/13 [==============================] - 0s 36ms/step - loss: 0.1313 - mae: 0.2833 - val_loss: 0.0201 - val_mae: 0.1127
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0805 - mae: 0.2339
13/13 [==============================] - 0s 4ms/step - loss: 0.1248 - mae: 0.2872

13/13 [==============================] - 0s 34ms/step - loss: 0.1248 - mae: 0.2872 - val_loss: 0.0330 - val_mae: 0.1525
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0908 - mae: 0.2425
12/13 [==========================>...] - ETA: 0s - loss: 0.1172 - mae: 0.2743
13/13 [==============================] - 0s 5ms/step - loss: 0.1173 - mae: 0.2749

13/13 [==============================] - 0s 36ms/step - loss: 0.1173 - mae: 0.2749 - val_loss: 0.0188 - val_mae: 0.1077
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1457 - mae: 0.3097
12/13 [==========================>...] - ETA: 0s - loss: 0.1096 - mae: 0.2693
13/13 [==============================] - 0s 5ms/step - loss: 0.1092 - mae: 0.2688

13/13 [==============================] - 0s 36ms/step - loss: 0.1092 - mae: 0.2688 - val_loss: 0.0190 - val_mae: 0.1093
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1313 - mae: 0.2991
13/13 [==============================] - 0s 3ms/step - loss: 0.1058 - mae: 0.2577

13/13 [==============================] - 0s 19ms/step - loss: 0.1058 - mae: 0.2577 - val_loss: 0.0177 - val_mae: 0.1045
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1282 - mae: 0.2771
12/13 [==========================>...] - ETA: 0s - loss: 0.1120 - mae: 0.2646
13/13 [==============================] - 0s 5ms/step - loss: 0.1114 - mae: 0.2635

13/13 [==============================] - 0s 30ms/step - loss: 0.1114 - mae: 0.2635 - val_loss: 0.0189 - val_mae: 0.1095
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1524 - mae: 0.3128
12/13 [==========================>...] - ETA: 0s - loss: 0.1008 - mae: 0.2583
13/13 [==============================] - 0s 5ms/step - loss: 0.1016 - mae: 0.2594

13/13 [==============================] - 0s 35ms/step - loss: 0.1016 - mae: 0.2594 - val_loss: 0.0304 - val_mae: 0.1469
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1082 - mae: 0.2411
13/13 [==============================] - 0s 3ms/step - loss: 0.0813 - mae: 0.2268

13/13 [==============================] - 0s 19ms/step - loss: 0.0813 - mae: 0.2268 - val_loss: 0.0167 - val_mae: 0.1005
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0880 - mae: 0.2456
13/13 [==============================] - 0s 4ms/step - loss: 0.1001 - mae: 0.2527

13/13 [==============================] - 0s 29ms/step - loss: 0.1001 - mae: 0.2527 - val_loss: 0.0179 - val_mae: 0.1066
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0997 - mae: 0.2414
10/13 [======================>.......] - ETA: 0s - loss: 0.0877 - mae: 0.2351
13/13 [==============================] - 0s 6ms/step - loss: 0.0889 - mae: 0.2377

13/13 [==============================] - 0s 36ms/step - loss: 0.0889 - mae: 0.2377 - val_loss: 0.0163 - val_mae: 0.0994
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0708 - mae: 0.2217
13/13 [==============================] - 0s 3ms/step - loss: 0.0930 - mae: 0.2435

13/13 [==============================] - 0s 21ms/step - loss: 0.0930 - mae: 0.2435 - val_loss: 0.0203 - val_mae: 0.1127
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0803 - mae: 0.2318
11/13 [========================>.....] - ETA: 0s - loss: 0.0833 - mae: 0.2317
13/13 [==============================] - 0s 5ms/step - loss: 0.0842 - mae: 0.2331

13/13 [==============================] - 0s 30ms/step - loss: 0.0842 - mae: 0.2331 - val_loss: 0.0168 - val_mae: 0.1019
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0768 - mae: 0.2312
12/13 [==========================>...] - ETA: 0s - loss: 0.0770 - mae: 0.2205
13/13 [==============================] - 0s 5ms/step - loss: 0.0774 - mae: 0.2213

13/13 [==============================] - 0s 34ms/step - loss: 0.0774 - mae: 0.2213 - val_loss: 0.0187 - val_mae: 0.1077
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0825 - mae: 0.2343
13/13 [==============================] - 0s 3ms/step - loss: 0.0722 - mae: 0.2155

13/13 [==============================] - 0s 18ms/step - loss: 0.0722 - mae: 0.2155 - val_loss: 0.0180 - val_mae: 0.1056
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0515 - mae: 0.1666
13/13 [==============================] - 0s 3ms/step - loss: 0.0734 - mae: 0.2171

13/13 [==============================] - 0s 30ms/step - loss: 0.0734 - mae: 0.2171 - val_loss: 0.0154 - val_mae: 0.0964
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0800 - mae: 0.2363
13/13 [==============================] - 0s 4ms/step - loss: 0.0832 - mae: 0.2309

13/13 [==============================] - 0s 27ms/step - loss: 0.0832 - mae: 0.2309 - val_loss: 0.0166 - val_mae: 0.1011
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1036 - mae: 0.2490
13/13 [==============================] - 0s 4ms/step - loss: 0.0732 - mae: 0.2130

13/13 [==============================] - 0s 37ms/step - loss: 0.0732 - mae: 0.2130 - val_loss: 0.0149 - val_mae: 0.0949
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0701 - mae: 0.2065
13/13 [==============================] - 0s 4ms/step - loss: 0.0659 - mae: 0.1994

13/13 [==============================] - 0s 30ms/step - loss: 0.0659 - mae: 0.1994 - val_loss: 0.0164 - val_mae: 0.1021
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0730 - mae: 0.2014
13/13 [==============================] - 0s 4ms/step - loss: 0.0555 - mae: 0.1885

13/13 [==============================] - 0s 32ms/step - loss: 0.0555 - mae: 0.1885 - val_loss: 0.0144 - val_mae: 0.0927
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0597 - mae: 0.1880
12/13 [==========================>...] - ETA: 0s - loss: 0.0551 - mae: 0.1828
13/13 [==============================] - 0s 5ms/step - loss: 0.0550 - mae: 0.1829

13/13 [==============================] - 0s 35ms/step - loss: 0.0550 - mae: 0.1829 - val_loss: 0.0145 - val_mae: 0.0927
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0619 - mae: 0.2075
13/13 [==============================] - 0s 4ms/step - loss: 0.0690 - mae: 0.2137

13/13 [==============================] - 0s 25ms/step - loss: 0.0690 - mae: 0.2137 - val_loss: 0.0147 - val_mae: 0.0947
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0552 - mae: 0.1937
12/13 [==========================>...] - ETA: 0s - loss: 0.0570 - mae: 0.1927
13/13 [==============================] - 0s 5ms/step - loss: 0.0571 - mae: 0.1932

13/13 [==============================] - 0s 41ms/step - loss: 0.0571 - mae: 0.1932 - val_loss: 0.0200 - val_mae: 0.1174
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0609 - mae: 0.1861
13/13 [==============================] - 0s 4ms/step - loss: 0.0588 - mae: 0.1932

13/13 [==============================] - 0s 30ms/step - loss: 0.0588 - mae: 0.1932 - val_loss: 0.0140 - val_mae: 0.0915
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0880 - mae: 0.2452
13/13 [==============================] - 0s 4ms/step - loss: 0.0657 - mae: 0.2045

13/13 [==============================] - 0s 28ms/step - loss: 0.0657 - mae: 0.2045 - val_loss: 0.0138 - val_mae: 0.0906
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0556 - mae: 0.1831
13/13 [==============================] - 0s 4ms/step - loss: 0.0589 - mae: 0.1953

13/13 [==============================] - 0s 36ms/step - loss: 0.0589 - mae: 0.1953 - val_loss: 0.0145 - val_mae: 0.0931
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0618 - mae: 0.1968
13/13 [==============================] - 0s 4ms/step - loss: 0.0510 - mae: 0.1818

13/13 [==============================] - 0s 26ms/step - loss: 0.0510 - mae: 0.1818 - val_loss: 0.0149 - val_mae: 0.0947
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0590 - mae: 0.1897
12/13 [==========================>...] - ETA: 0s - loss: 0.0504 - mae: 0.1790
13/13 [==============================] - 0s 5ms/step - loss: 0.0503 - mae: 0.1787

13/13 [==============================] - 0s 31ms/step - loss: 0.0503 - mae: 0.1787 - val_loss: 0.0136 - val_mae: 0.0890
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0789 - mae: 0.2241
13/13 [==============================] - 0s 4ms/step - loss: 0.0473 - mae: 0.1767

13/13 [==============================] - 0s 37ms/step - loss: 0.0473 - mae: 0.1767 - val_loss: 0.0135 - val_mae: 0.0887
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0652 - mae: 0.1909
13/13 [==============================] - 0s 3ms/step - loss: 0.0517 - mae: 0.1828

13/13 [==============================] - 0s 22ms/step - loss: 0.0517 - mae: 0.1828 - val_loss: 0.0134 - val_mae: 0.0884
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0683 - mae: 0.2024
13/13 [==============================] - 0s 4ms/step - loss: 0.0503 - mae: 0.1755

13/13 [==============================] - 0s 35ms/step - loss: 0.0503 - mae: 0.1755 - val_loss: 0.0133 - val_mae: 0.0882
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0600 - mae: 0.2031
13/13 [==============================] - 0s 4ms/step - loss: 0.0501 - mae: 0.1796

13/13 [==============================] - 0s 32ms/step - loss: 0.0501 - mae: 0.1796 - val_loss: 0.0205 - val_mae: 0.1138
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0445 - mae: 0.1580
13/13 [==============================] - 0s 3ms/step - loss: 0.0447 - mae: 0.1693

13/13 [==============================] - 0s 20ms/step - loss: 0.0447 - mae: 0.1693 - val_loss: 0.0141 - val_mae: 0.0935
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0395 - mae: 0.1602
12/13 [==========================>...] - ETA: 0s - loss: 0.0474 - mae: 0.1721
13/13 [==============================] - 0s 5ms/step - loss: 0.0474 - mae: 0.1723

13/13 [==============================] - 0s 38ms/step - loss: 0.0474 - mae: 0.1723 - val_loss: 0.0133 - val_mae: 0.0894
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0367 - mae: 0.1399
13/13 [==============================] - 0s 4ms/step - loss: 0.0422 - mae: 0.1670

13/13 [==============================] - 0s 33ms/step - loss: 0.0422 - mae: 0.1670 - val_loss: 0.0165 - val_mae: 0.1049
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0306 - mae: 0.1333
13/13 [==============================] - 0s 3ms/step - loss: 0.0433 - mae: 0.1712

13/13 [==============================] - 0s 26ms/step - loss: 0.0433 - mae: 0.1712 - val_loss: 0.0131 - val_mae: 0.0871
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0434 - mae: 0.1715
13/13 [==============================] - 0s 4ms/step - loss: 0.0411 - mae: 0.1613

13/13 [==============================] - 0s 36ms/step - loss: 0.0411 - mae: 0.1613 - val_loss: 0.0130 - val_mae: 0.0877
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0420 - mae: 0.1670
13/13 [==============================] - 0s 3ms/step - loss: 0.0387 - mae: 0.1553

13/13 [==============================] - 0s 23ms/step - loss: 0.0387 - mae: 0.1553 - val_loss: 0.0170 - val_mae: 0.1071
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0632 - mae: 0.2067
13/13 [==============================] - 0s 4ms/step - loss: 0.0426 - mae: 0.1657

13/13 [==============================] - 0s 23ms/step - loss: 0.0426 - mae: 0.1657 - val_loss: 0.0137 - val_mae: 0.0920
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0431 - mae: 0.1561
13/13 [==============================] - 0s 4ms/step - loss: 0.0395 - mae: 0.1634

13/13 [==============================] - 0s 35ms/step - loss: 0.0395 - mae: 0.1634 - val_loss: 0.0151 - val_mae: 0.0948

Run completed: runs/2022-12-07T02-40-43Z

Training run 25/52 (flags = list(16, 10, 0.001, 50, 50, "tanh", "sigmoid", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-41-17Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 3s - loss: 1.0517 - mae: 0.8548
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0042s). Check your callbacks.

8/8 [==============================] - 1s 2ms/step - loss: 1.0574 - mae: 0.8316

8/8 [==============================] - 1s 114ms/step - loss: 1.0574 - mae: 0.8316 - val_loss: 0.2556 - val_mae: 0.4917
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9814 - mae: 0.8277
8/8 [==============================] - 0s 4ms/step - loss: 0.8574 - mae: 0.7535

8/8 [==============================] - 0s 37ms/step - loss: 0.8574 - mae: 0.7535 - val_loss: 0.2358 - val_mae: 0.4721
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 1.2449 - mae: 0.8784
8/8 [==============================] - 0s 5ms/step - loss: 0.9952 - mae: 0.7979

8/8 [==============================] - 0s 57ms/step - loss: 0.9952 - mae: 0.7979 - val_loss: 0.2164 - val_mae: 0.4519
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9410 - mae: 0.7862
8/8 [==============================] - 0s 3ms/step - loss: 0.9018 - mae: 0.7630

8/8 [==============================] - 0s 39ms/step - loss: 0.9018 - mae: 0.7630 - val_loss: 0.1986 - val_mae: 0.4327
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9645 - mae: 0.7480
8/8 [==============================] - 0s 3ms/step - loss: 0.9774 - mae: 0.7821

8/8 [==============================] - 0s 39ms/step - loss: 0.9774 - mae: 0.7821 - val_loss: 0.1806 - val_mae: 0.4122
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7670 - mae: 0.6803
8/8 [==============================] - 0s 4ms/step - loss: 0.9091 - mae: 0.7756

8/8 [==============================] - 0s 64ms/step - loss: 0.9091 - mae: 0.7756 - val_loss: 0.1646 - val_mae: 0.3930
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7149 - mae: 0.7002
8/8 [==============================] - 0s 2ms/step - loss: 0.8172 - mae: 0.7213

8/8 [==============================] - 0s 30ms/step - loss: 0.8172 - mae: 0.7213 - val_loss: 0.1545 - val_mae: 0.3804
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7663 - mae: 0.6775
8/8 [==============================] - 0s 3ms/step - loss: 0.7834 - mae: 0.7109

8/8 [==============================] - 0s 36ms/step - loss: 0.7834 - mae: 0.7109 - val_loss: 0.1398 - val_mae: 0.3612
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7492 - mae: 0.7297
8/8 [==============================] - 0s 3ms/step - loss: 0.8053 - mae: 0.7292

8/8 [==============================] - 0s 31ms/step - loss: 0.8053 - mae: 0.7292 - val_loss: 0.1322 - val_mae: 0.3508
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6727 - mae: 0.6811
8/8 [==============================] - 0s 3ms/step - loss: 0.7788 - mae: 0.7123

8/8 [==============================] - 0s 35ms/step - loss: 0.7788 - mae: 0.7123 - val_loss: 0.1215 - val_mae: 0.3356
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6668 - mae: 0.6668
8/8 [==============================] - 0s 2ms/step - loss: 0.6536 - mae: 0.6726

8/8 [==============================] - 0s 31ms/step - loss: 0.6536 - mae: 0.6726 - val_loss: 0.1162 - val_mae: 0.3280
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5858 - mae: 0.5916
8/8 [==============================] - 0s 3ms/step - loss: 0.8183 - mae: 0.7187

8/8 [==============================] - 0s 35ms/step - loss: 0.8183 - mae: 0.7187 - val_loss: 0.1074 - val_mae: 0.3146
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 1.1607 - mae: 0.8791
8/8 [==============================] - 0s 3ms/step - loss: 0.7789 - mae: 0.7195

8/8 [==============================] - 0s 34ms/step - loss: 0.7789 - mae: 0.7195 - val_loss: 0.0983 - val_mae: 0.2999
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8060 - mae: 0.7515
8/8 [==============================] - 0s 3ms/step - loss: 0.7014 - mae: 0.6696

8/8 [==============================] - 0s 31ms/step - loss: 0.7014 - mae: 0.6696 - val_loss: 0.0926 - val_mae: 0.2904
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5404 - mae: 0.5993
8/8 [==============================] - 0s 3ms/step - loss: 0.6608 - mae: 0.6402

8/8 [==============================] - 0s 38ms/step - loss: 0.6608 - mae: 0.6402 - val_loss: 0.0861 - val_mae: 0.2791
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7114 - mae: 0.6798
8/8 [==============================] - 0s 3ms/step - loss: 0.6674 - mae: 0.6677

8/8 [==============================] - 0s 31ms/step - loss: 0.6674 - mae: 0.6677 - val_loss: 0.0806 - val_mae: 0.2692
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5899 - mae: 0.6133
8/8 [==============================] - 0s 3ms/step - loss: 0.5894 - mae: 0.6121

8/8 [==============================] - 0s 33ms/step - loss: 0.5894 - mae: 0.6121 - val_loss: 0.0770 - val_mae: 0.2626
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7995 - mae: 0.7493
8/8 [==============================] - 0s 3ms/step - loss: 0.7153 - mae: 0.6891

8/8 [==============================] - 0s 36ms/step - loss: 0.7153 - mae: 0.6891 - val_loss: 0.0731 - val_mae: 0.2551
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6258 - mae: 0.6199
8/8 [==============================] - 0s 3ms/step - loss: 0.6785 - mae: 0.6562

8/8 [==============================] - 0s 35ms/step - loss: 0.6785 - mae: 0.6562 - val_loss: 0.0685 - val_mae: 0.2459
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6352 - mae: 0.6511
8/8 [==============================] - 0s 3ms/step - loss: 0.5532 - mae: 0.5942

8/8 [==============================] - 0s 33ms/step - loss: 0.5532 - mae: 0.5942 - val_loss: 0.0628 - val_mae: 0.2340
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6721 - mae: 0.7078
8/8 [==============================] - 0s 2ms/step - loss: 0.6068 - mae: 0.6277

8/8 [==============================] - 0s 31ms/step - loss: 0.6068 - mae: 0.6277 - val_loss: 0.0599 - val_mae: 0.2276
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6924 - mae: 0.6790
8/8 [==============================] - 0s 3ms/step - loss: 0.6178 - mae: 0.6271

8/8 [==============================] - 0s 33ms/step - loss: 0.6178 - mae: 0.6271 - val_loss: 0.0571 - val_mae: 0.2213
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4010 - mae: 0.4623
8/8 [==============================] - 0s 3ms/step - loss: 0.5633 - mae: 0.5974

8/8 [==============================] - 0s 35ms/step - loss: 0.5633 - mae: 0.5974 - val_loss: 0.0536 - val_mae: 0.2137
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8513 - mae: 0.6871
8/8 [==============================] - 0s 3ms/step - loss: 0.6158 - mae: 0.6195

8/8 [==============================] - 0s 32ms/step - loss: 0.6158 - mae: 0.6195 - val_loss: 0.0523 - val_mae: 0.2107
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6260 - mae: 0.6341
8/8 [==============================] - 0s 3ms/step - loss: 0.6147 - mae: 0.6217

8/8 [==============================] - 0s 33ms/step - loss: 0.6147 - mae: 0.6217 - val_loss: 0.0501 - val_mae: 0.2057
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4142 - mae: 0.5156
8/8 [==============================] - 0s 3ms/step - loss: 0.5831 - mae: 0.6125

8/8 [==============================] - 0s 47ms/step - loss: 0.5831 - mae: 0.6125 - val_loss: 0.0489 - val_mae: 0.2027
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4972 - mae: 0.5413
8/8 [==============================] - 0s 4ms/step - loss: 0.5968 - mae: 0.6238

8/8 [==============================] - 0s 62ms/step - loss: 0.5968 - mae: 0.6238 - val_loss: 0.0470 - val_mae: 0.1983
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7054 - mae: 0.6374
8/8 [==============================] - 0s 5ms/step - loss: 0.5503 - mae: 0.5894

8/8 [==============================] - 0s 59ms/step - loss: 0.5503 - mae: 0.5894 - val_loss: 0.0452 - val_mae: 0.1940
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4915 - mae: 0.5858
8/8 [==============================] - 0s 3ms/step - loss: 0.5346 - mae: 0.5942

8/8 [==============================] - 0s 32ms/step - loss: 0.5346 - mae: 0.5942 - val_loss: 0.0437 - val_mae: 0.1903
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6416 - mae: 0.6931
8/8 [==============================] - 0s 3ms/step - loss: 0.5847 - mae: 0.6210

8/8 [==============================] - 0s 33ms/step - loss: 0.5847 - mae: 0.6210 - val_loss: 0.0418 - val_mae: 0.1858
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7457 - mae: 0.7105
8/8 [==============================] - 0s 3ms/step - loss: 0.5628 - mae: 0.6028

8/8 [==============================] - 0s 36ms/step - loss: 0.5628 - mae: 0.6028 - val_loss: 0.0401 - val_mae: 0.1815
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3990 - mae: 0.5075
8/8 [==============================] - 0s 3ms/step - loss: 0.4947 - mae: 0.5662

8/8 [==============================] - 0s 33ms/step - loss: 0.4947 - mae: 0.5662 - val_loss: 0.0391 - val_mae: 0.1788
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5527 - mae: 0.5713
8/8 [==============================] - 0s 3ms/step - loss: 0.5011 - mae: 0.5668

8/8 [==============================] - 0s 35ms/step - loss: 0.5011 - mae: 0.5668 - val_loss: 0.0373 - val_mae: 0.1742
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6100 - mae: 0.6579
8/8 [==============================] - 0s 3ms/step - loss: 0.5826 - mae: 0.6153

8/8 [==============================] - 0s 33ms/step - loss: 0.5826 - mae: 0.6153 - val_loss: 0.0370 - val_mae: 0.1735
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3864 - mae: 0.5232
8/8 [==============================] - 0s 3ms/step - loss: 0.4433 - mae: 0.5361

8/8 [==============================] - 0s 33ms/step - loss: 0.4433 - mae: 0.5361 - val_loss: 0.0359 - val_mae: 0.1707
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4879 - mae: 0.5754
8/8 [==============================] - 0s 3ms/step - loss: 0.4980 - mae: 0.5589

8/8 [==============================] - 0s 35ms/step - loss: 0.4980 - mae: 0.5589 - val_loss: 0.0342 - val_mae: 0.1660
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4890 - mae: 0.5717
8/8 [==============================] - 0s 3ms/step - loss: 0.4797 - mae: 0.5469

8/8 [==============================] - 0s 33ms/step - loss: 0.4797 - mae: 0.5469 - val_loss: 0.0333 - val_mae: 0.1637
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4678 - mae: 0.5775
8/8 [==============================] - 0s 3ms/step - loss: 0.5153 - mae: 0.5698

8/8 [==============================] - 0s 33ms/step - loss: 0.5153 - mae: 0.5698 - val_loss: 0.0319 - val_mae: 0.1596
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5357 - mae: 0.6123
8/8 [==============================] - 0s 2ms/step - loss: 0.4724 - mae: 0.5638

8/8 [==============================] - 0s 31ms/step - loss: 0.4724 - mae: 0.5638 - val_loss: 0.0306 - val_mae: 0.1558
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3811 - mae: 0.5176
8/8 [==============================] - 0s 4ms/step - loss: 0.4848 - mae: 0.5617

8/8 [==============================] - 0s 37ms/step - loss: 0.4848 - mae: 0.5617 - val_loss: 0.0302 - val_mae: 0.1546
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3927 - mae: 0.4885
8/8 [==============================] - 0s 3ms/step - loss: 0.4409 - mae: 0.5355

8/8 [==============================] - 0s 33ms/step - loss: 0.4409 - mae: 0.5355 - val_loss: 0.0295 - val_mae: 0.1527
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3572 - mae: 0.4913
8/8 [==============================] - 0s 3ms/step - loss: 0.4840 - mae: 0.5638

8/8 [==============================] - 0s 36ms/step - loss: 0.4840 - mae: 0.5638 - val_loss: 0.0286 - val_mae: 0.1500
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4066 - mae: 0.5234
8/8 [==============================] - 0s 3ms/step - loss: 0.4429 - mae: 0.5395

8/8 [==============================] - 0s 31ms/step - loss: 0.4429 - mae: 0.5395 - val_loss: 0.0285 - val_mae: 0.1498
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4233 - mae: 0.5213
8/8 [==============================] - 0s 3ms/step - loss: 0.4234 - mae: 0.5235

8/8 [==============================] - 0s 37ms/step - loss: 0.4234 - mae: 0.5235 - val_loss: 0.0272 - val_mae: 0.1457
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3453 - mae: 0.4900
8/8 [==============================] - 0s 3ms/step - loss: 0.4387 - mae: 0.5310

8/8 [==============================] - 0s 36ms/step - loss: 0.4387 - mae: 0.5310 - val_loss: 0.0266 - val_mae: 0.1440
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3459 - mae: 0.4592
8/8 [==============================] - 0s 3ms/step - loss: 0.4248 - mae: 0.5173

8/8 [==============================] - 0s 31ms/step - loss: 0.4248 - mae: 0.5173 - val_loss: 0.0262 - val_mae: 0.1425
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3729 - mae: 0.5010
8/8 [==============================] - 0s 3ms/step - loss: 0.4464 - mae: 0.5234

8/8 [==============================] - 0s 38ms/step - loss: 0.4464 - mae: 0.5234 - val_loss: 0.0258 - val_mae: 0.1414
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2884 - mae: 0.4475
8/8 [==============================] - 0s 3ms/step - loss: 0.4887 - mae: 0.5446

8/8 [==============================] - 0s 35ms/step - loss: 0.4887 - mae: 0.5446 - val_loss: 0.0256 - val_mae: 0.1409
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3317 - mae: 0.4648
8/8 [==============================] - 0s 3ms/step - loss: 0.4085 - mae: 0.5162

8/8 [==============================] - 0s 29ms/step - loss: 0.4085 - mae: 0.5162 - val_loss: 0.0260 - val_mae: 0.1421
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4034 - mae: 0.5166
8/8 [==============================] - 0s 2ms/step - loss: 0.4340 - mae: 0.5268

8/8 [==============================] - 0s 33ms/step - loss: 0.4340 - mae: 0.5268 - val_loss: 0.0256 - val_mae: 0.1408

Run completed: runs/2022-12-07T02-41-17Z

Training run 26/52 (flags = list(32, 50, 0.01, 30, 30, "relu", "tanh", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-41-45Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 6s - loss: 1.6998 - mae: 1.0332
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0025s vs `on_train_batch_end` time: 0.0047s). Check your callbacks.

13/13 [==============================] - 1s 5ms/step - loss: 1.0037 - mae: 0.7843

13/13 [==============================] - 2s 78ms/step - loss: 1.0037 - mae: 0.7843 - val_loss: 0.1112 - val_mae: 0.2740
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7134 - mae: 0.7185
13/13 [==============================] - 0s 4ms/step - loss: 0.5861 - mae: 0.6121

13/13 [==============================] - 0s 24ms/step - loss: 0.5861 - mae: 0.6121 - val_loss: 0.0630 - val_mae: 0.2027
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3186 - mae: 0.4178
13/13 [==============================] - 0s 4ms/step - loss: 0.5227 - mae: 0.5518

13/13 [==============================] - 0s 28ms/step - loss: 0.5227 - mae: 0.5518 - val_loss: 0.0444 - val_mae: 0.1701
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7174 - mae: 0.6626
13/13 [==============================] - 0s 3ms/step - loss: 0.3870 - mae: 0.4962

13/13 [==============================] - 0s 18ms/step - loss: 0.3870 - mae: 0.4962 - val_loss: 0.0322 - val_mae: 0.1466
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4583 - mae: 0.5391
13/13 [==============================] - 0s 3ms/step - loss: 0.3756 - mae: 0.4775

13/13 [==============================] - 0s 19ms/step - loss: 0.3756 - mae: 0.4775 - val_loss: 0.0275 - val_mae: 0.1356
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3344 - mae: 0.4679
13/13 [==============================] - 0s 3ms/step - loss: 0.3319 - mae: 0.4554

13/13 [==============================] - 0s 21ms/step - loss: 0.3319 - mae: 0.4554 - val_loss: 0.0261 - val_mae: 0.1285
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4832 - mae: 0.4919
13/13 [==============================] - 0s 3ms/step - loss: 0.2685 - mae: 0.3842

13/13 [==============================] - 0s 20ms/step - loss: 0.2685 - mae: 0.3842 - val_loss: 0.0227 - val_mae: 0.1173
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2534 - mae: 0.4071
13/13 [==============================] - 0s 4ms/step - loss: 0.1952 - mae: 0.3442

13/13 [==============================] - 0s 35ms/step - loss: 0.1952 - mae: 0.3442 - val_loss: 0.0188 - val_mae: 0.1053
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2093 - mae: 0.3572
13/13 [==============================] - 0s 4ms/step - loss: 0.1973 - mae: 0.3441

13/13 [==============================] - 0s 32ms/step - loss: 0.1973 - mae: 0.3441 - val_loss: 0.0174 - val_mae: 0.1039
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2598 - mae: 0.4129
13/13 [==============================] - 0s 2ms/step - loss: 0.1770 - mae: 0.3303

13/13 [==============================] - 0s 19ms/step - loss: 0.1770 - mae: 0.3303 - val_loss: 0.0198 - val_mae: 0.1215
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1573 - mae: 0.3384
13/13 [==============================] - 0s 2ms/step - loss: 0.1630 - mae: 0.3148

13/13 [==============================] - 0s 20ms/step - loss: 0.1630 - mae: 0.3148 - val_loss: 0.0165 - val_mae: 0.1068
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2175 - mae: 0.3677
13/13 [==============================] - 0s 2ms/step - loss: 0.1458 - mae: 0.2968

13/13 [==============================] - 0s 18ms/step - loss: 0.1458 - mae: 0.2968 - val_loss: 0.0130 - val_mae: 0.0902
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0783 - mae: 0.2290
13/13 [==============================] - 0s 3ms/step - loss: 0.1081 - mae: 0.2588

13/13 [==============================] - 0s 21ms/step - loss: 0.1081 - mae: 0.2588 - val_loss: 0.0122 - val_mae: 0.0801
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2005 - mae: 0.3676
13/13 [==============================] - 0s 3ms/step - loss: 0.1146 - mae: 0.2648

13/13 [==============================] - 0s 19ms/step - loss: 0.1146 - mae: 0.2648 - val_loss: 0.0124 - val_mae: 0.0822
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0996 - mae: 0.2545
13/13 [==============================] - 0s 3ms/step - loss: 0.0893 - mae: 0.2344

13/13 [==============================] - 1s 48ms/step - loss: 0.0893 - mae: 0.2344 - val_loss: 0.0105 - val_mae: 0.0750
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0740 - mae: 0.2231
13/13 [==============================] - 0s 3ms/step - loss: 0.0866 - mae: 0.2276

13/13 [==============================] - 0s 19ms/step - loss: 0.0866 - mae: 0.2276 - val_loss: 0.0105 - val_mae: 0.0748
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0640 - mae: 0.1985
13/13 [==============================] - 0s 3ms/step - loss: 0.0759 - mae: 0.2199

13/13 [==============================] - 0s 20ms/step - loss: 0.0759 - mae: 0.2199 - val_loss: 0.0101 - val_mae: 0.0751
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0446 - mae: 0.1735
13/13 [==============================] - 0s 2ms/step - loss: 0.0679 - mae: 0.2054

13/13 [==============================] - 0s 18ms/step - loss: 0.0679 - mae: 0.2054 - val_loss: 0.0108 - val_mae: 0.0841
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0392 - mae: 0.1675
13/13 [==============================] - 0s 3ms/step - loss: 0.0746 - mae: 0.2146

13/13 [==============================] - 0s 20ms/step - loss: 0.0746 - mae: 0.2146 - val_loss: 0.0104 - val_mae: 0.0761
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0740 - mae: 0.1998
13/13 [==============================] - 0s 3ms/step - loss: 0.0643 - mae: 0.1996

13/13 [==============================] - 0s 21ms/step - loss: 0.0643 - mae: 0.1996 - val_loss: 0.0100 - val_mae: 0.0748
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0421 - mae: 0.1545
13/13 [==============================] - 0s 4ms/step - loss: 0.0702 - mae: 0.2084

13/13 [==============================] - 0s 25ms/step - loss: 0.0702 - mae: 0.2084 - val_loss: 0.0091 - val_mae: 0.0749
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0859 - mae: 0.2336
13/13 [==============================] - 0s 3ms/step - loss: 0.0603 - mae: 0.1959

13/13 [==============================] - 0s 25ms/step - loss: 0.0603 - mae: 0.1959 - val_loss: 0.0098 - val_mae: 0.0791
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0311 - mae: 0.1513
13/13 [==============================] - 0s 3ms/step - loss: 0.0473 - mae: 0.1715

13/13 [==============================] - 0s 25ms/step - loss: 0.0473 - mae: 0.1715 - val_loss: 0.0097 - val_mae: 0.0794
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0579 - mae: 0.2010
13/13 [==============================] - 0s 3ms/step - loss: 0.0487 - mae: 0.1758

13/13 [==============================] - 0s 21ms/step - loss: 0.0487 - mae: 0.1758 - val_loss: 0.0098 - val_mae: 0.0820
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0461 - mae: 0.1692
13/13 [==============================] - 0s 3ms/step - loss: 0.0466 - mae: 0.1715

13/13 [==============================] - 0s 18ms/step - loss: 0.0466 - mae: 0.1715 - val_loss: 0.0092 - val_mae: 0.0794
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0422 - mae: 0.1712
13/13 [==============================] - 0s 3ms/step - loss: 0.0431 - mae: 0.1671

13/13 [==============================] - 0s 19ms/step - loss: 0.0431 - mae: 0.1671 - val_loss: 0.0089 - val_mae: 0.0758
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0415 - mae: 0.1717
13/13 [==============================] - 0s 3ms/step - loss: 0.0404 - mae: 0.1618

13/13 [==============================] - 0s 18ms/step - loss: 0.0404 - mae: 0.1618 - val_loss: 0.0094 - val_mae: 0.0793
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0396 - mae: 0.1715
13/13 [==============================] - 0s 3ms/step - loss: 0.0367 - mae: 0.1518

13/13 [==============================] - 0s 23ms/step - loss: 0.0367 - mae: 0.1518 - val_loss: 0.0080 - val_mae: 0.0687
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0457 - mae: 0.1706
13/13 [==============================] - 0s 3ms/step - loss: 0.0359 - mae: 0.1522

13/13 [==============================] - 0s 20ms/step - loss: 0.0359 - mae: 0.1522 - val_loss: 0.0077 - val_mae: 0.0668
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0328 - mae: 0.1448
13/13 [==============================] - 0s 3ms/step - loss: 0.0330 - mae: 0.1432

13/13 [==============================] - 0s 19ms/step - loss: 0.0330 - mae: 0.1432 - val_loss: 0.0076 - val_mae: 0.0677

Run completed: runs/2022-12-07T02-41-45Z

Training run 27/52 (flags = list(32, 32, 0.001, 30, 50, "sigmoid", "tanh", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-42-18Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 8s - loss: 0.2526 - mae: 0.4148
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0020s vs `on_train_batch_end` time: 0.0043s). Check your callbacks.

13/13 [==============================] - 1s 3ms/step - loss: 0.2270 - mae: 0.3905

13/13 [==============================] - 2s 75ms/step - loss: 0.2270 - mae: 0.3905 - val_loss: 0.0386 - val_mae: 0.1615
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2377 - mae: 0.3771
13/13 [==============================] - 0s 3ms/step - loss: 0.2569 - mae: 0.3984

13/13 [==============================] - 0s 20ms/step - loss: 0.2569 - mae: 0.3984 - val_loss: 0.0255 - val_mae: 0.1320
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3408 - mae: 0.4655
13/13 [==============================] - 0s 3ms/step - loss: 0.2178 - mae: 0.3708

13/13 [==============================] - 0s 27ms/step - loss: 0.2178 - mae: 0.3708 - val_loss: 0.0219 - val_mae: 0.1226
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2901 - mae: 0.4213
13/13 [==============================] - 0s 3ms/step - loss: 0.1973 - mae: 0.3582

13/13 [==============================] - 0s 24ms/step - loss: 0.1973 - mae: 0.3582 - val_loss: 0.0223 - val_mae: 0.1232
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2531 - mae: 0.4082
13/13 [==============================] - 0s 3ms/step - loss: 0.2091 - mae: 0.3685

13/13 [==============================] - 0s 26ms/step - loss: 0.2091 - mae: 0.3685 - val_loss: 0.0181 - val_mae: 0.1103
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2147 - mae: 0.3616
13/13 [==============================] - 0s 3ms/step - loss: 0.2069 - mae: 0.3609

13/13 [==============================] - 0s 27ms/step - loss: 0.2069 - mae: 0.3609 - val_loss: 0.0157 - val_mae: 0.1009
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1955 - mae: 0.3653
13/13 [==============================] - 0s 3ms/step - loss: 0.2049 - mae: 0.3624

13/13 [==============================] - 0s 25ms/step - loss: 0.2049 - mae: 0.3624 - val_loss: 0.0147 - val_mae: 0.0970
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2425 - mae: 0.3761
13/13 [==============================] - 0s 3ms/step - loss: 0.2162 - mae: 0.3753

13/13 [==============================] - 0s 26ms/step - loss: 0.2162 - mae: 0.3753 - val_loss: 0.0148 - val_mae: 0.0975
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2203 - mae: 0.4131
13/13 [==============================] - 0s 3ms/step - loss: 0.1931 - mae: 0.3453

13/13 [==============================] - 0s 21ms/step - loss: 0.1931 - mae: 0.3453 - val_loss: 0.0160 - val_mae: 0.1020
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1619 - mae: 0.3430
13/13 [==============================] - 0s 4ms/step - loss: 0.2064 - mae: 0.3473

13/13 [==============================] - 0s 23ms/step - loss: 0.2064 - mae: 0.3473 - val_loss: 0.0152 - val_mae: 0.0986
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1248 - mae: 0.2938
13/13 [==============================] - 0s 4ms/step - loss: 0.1692 - mae: 0.3259

13/13 [==============================] - 0s 31ms/step - loss: 0.1692 - mae: 0.3259 - val_loss: 0.0149 - val_mae: 0.0976
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2056 - mae: 0.3620
13/13 [==============================] - 0s 3ms/step - loss: 0.1920 - mae: 0.3474

13/13 [==============================] - 0s 20ms/step - loss: 0.1920 - mae: 0.3474 - val_loss: 0.0143 - val_mae: 0.0951
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2283 - mae: 0.4055
13/13 [==============================] - 0s 3ms/step - loss: 0.2134 - mae: 0.3651

13/13 [==============================] - 0s 26ms/step - loss: 0.2134 - mae: 0.3651 - val_loss: 0.0126 - val_mae: 0.0886
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1556 - mae: 0.3194
13/13 [==============================] - 0s 3ms/step - loss: 0.1806 - mae: 0.3436

13/13 [==============================] - 0s 24ms/step - loss: 0.1806 - mae: 0.3436 - val_loss: 0.0129 - val_mae: 0.0897
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2092 - mae: 0.3767
13/13 [==============================] - 0s 3ms/step - loss: 0.2038 - mae: 0.3538

13/13 [==============================] - 0s 22ms/step - loss: 0.2038 - mae: 0.3538 - val_loss: 0.0119 - val_mae: 0.0859
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1596 - mae: 0.3360
13/13 [==============================] - 0s 4ms/step - loss: 0.1911 - mae: 0.3534

13/13 [==============================] - 0s 25ms/step - loss: 0.1911 - mae: 0.3534 - val_loss: 0.0120 - val_mae: 0.0857
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3273 - mae: 0.4461
13/13 [==============================] - 0s 3ms/step - loss: 0.1834 - mae: 0.3435

13/13 [==============================] - 0s 25ms/step - loss: 0.1834 - mae: 0.3435 - val_loss: 0.0121 - val_mae: 0.0855
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1262 - mae: 0.2715
13/13 [==============================] - 0s 3ms/step - loss: 0.1796 - mae: 0.3338

13/13 [==============================] - 0s 22ms/step - loss: 0.1796 - mae: 0.3338 - val_loss: 0.0106 - val_mae: 0.0803
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1706 - mae: 0.3254
13/13 [==============================] - 0s 4ms/step - loss: 0.1689 - mae: 0.3293

13/13 [==============================] - 0s 29ms/step - loss: 0.1689 - mae: 0.3293 - val_loss: 0.0114 - val_mae: 0.0824
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2139 - mae: 0.3799
13/13 [==============================] - 0s 3ms/step - loss: 0.1836 - mae: 0.3481

13/13 [==============================] - 0s 21ms/step - loss: 0.1836 - mae: 0.3481 - val_loss: 0.0115 - val_mae: 0.0826
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1771 - mae: 0.3083
13/13 [==============================] - 0s 3ms/step - loss: 0.1578 - mae: 0.3154

13/13 [==============================] - 0s 22ms/step - loss: 0.1578 - mae: 0.3154 - val_loss: 0.0108 - val_mae: 0.0802
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1824 - mae: 0.3297
13/13 [==============================] - 0s 4ms/step - loss: 0.1707 - mae: 0.3224

13/13 [==============================] - 0s 24ms/step - loss: 0.1707 - mae: 0.3224 - val_loss: 0.0116 - val_mae: 0.0825
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2132 - mae: 0.3622
13/13 [==============================] - 0s 3ms/step - loss: 0.1869 - mae: 0.3464

13/13 [==============================] - 0s 21ms/step - loss: 0.1869 - mae: 0.3464 - val_loss: 0.0105 - val_mae: 0.0787
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1451 - mae: 0.3088
13/13 [==============================] - 0s 3ms/step - loss: 0.1661 - mae: 0.3220

13/13 [==============================] - 0s 29ms/step - loss: 0.1661 - mae: 0.3220 - val_loss: 0.0098 - val_mae: 0.0764
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1915 - mae: 0.3525
13/13 [==============================] - 0s 3ms/step - loss: 0.1771 - mae: 0.3357

13/13 [==============================] - 0s 24ms/step - loss: 0.1771 - mae: 0.3357 - val_loss: 0.0095 - val_mae: 0.0754
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1559 - mae: 0.3026
13/13 [==============================] - 0s 3ms/step - loss: 0.1619 - mae: 0.3209

13/13 [==============================] - 0s 22ms/step - loss: 0.1619 - mae: 0.3209 - val_loss: 0.0090 - val_mae: 0.0747
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1763 - mae: 0.3323
13/13 [==============================] - 0s 3ms/step - loss: 0.1719 - mae: 0.3283

13/13 [==============================] - 0s 24ms/step - loss: 0.1719 - mae: 0.3283 - val_loss: 0.0092 - val_mae: 0.0739
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2215 - mae: 0.3658
13/13 [==============================] - 0s 3ms/step - loss: 0.1778 - mae: 0.3315

13/13 [==============================] - 0s 23ms/step - loss: 0.1778 - mae: 0.3315 - val_loss: 0.0094 - val_mae: 0.0739
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1272 - mae: 0.2996
13/13 [==============================] - 0s 3ms/step - loss: 0.1928 - mae: 0.3552

13/13 [==============================] - 0s 22ms/step - loss: 0.1928 - mae: 0.3552 - val_loss: 0.0089 - val_mae: 0.0726
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1281 - mae: 0.2892
13/13 [==============================] - 0s 4ms/step - loss: 0.1653 - mae: 0.3262

13/13 [==============================] - 0s 30ms/step - loss: 0.1653 - mae: 0.3262 - val_loss: 0.0086 - val_mae: 0.0722
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1828 - mae: 0.3517
13/13 [==============================] - 0s 3ms/step - loss: 0.1633 - mae: 0.3172

13/13 [==============================] - 0s 22ms/step - loss: 0.1633 - mae: 0.3172 - val_loss: 0.0085 - val_mae: 0.0715
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1455 - mae: 0.3211
13/13 [==============================] - 0s 3ms/step - loss: 0.1620 - mae: 0.3207

13/13 [==============================] - 0s 24ms/step - loss: 0.1620 - mae: 0.3207 - val_loss: 0.0084 - val_mae: 0.0706
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1174 - mae: 0.2887
13/13 [==============================] - 0s 3ms/step - loss: 0.1438 - mae: 0.3078

13/13 [==============================] - 0s 28ms/step - loss: 0.1438 - mae: 0.3078 - val_loss: 0.0081 - val_mae: 0.0708
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1479 - mae: 0.3144
13/13 [==============================] - 0s 3ms/step - loss: 0.1544 - mae: 0.3089

13/13 [==============================] - 0s 22ms/step - loss: 0.1544 - mae: 0.3089 - val_loss: 0.0085 - val_mae: 0.0704
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2549 - mae: 0.3806
13/13 [==============================] - 0s 3ms/step - loss: 0.1701 - mae: 0.3259

13/13 [==============================] - 0s 23ms/step - loss: 0.1701 - mae: 0.3259 - val_loss: 0.0082 - val_mae: 0.0697
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1655 - mae: 0.3468
13/13 [==============================] - 0s 4ms/step - loss: 0.1520 - mae: 0.3136

13/13 [==============================] - 0s 32ms/step - loss: 0.1520 - mae: 0.3136 - val_loss: 0.0092 - val_mae: 0.0710
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1110 - mae: 0.2629
13/13 [==============================] - 0s 3ms/step - loss: 0.1655 - mae: 0.3220

13/13 [==============================] - 0s 20ms/step - loss: 0.1655 - mae: 0.3220 - val_loss: 0.0088 - val_mae: 0.0698
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1886 - mae: 0.3726
13/13 [==============================] - 0s 3ms/step - loss: 0.1444 - mae: 0.3063

13/13 [==============================] - 0s 26ms/step - loss: 0.1444 - mae: 0.3063 - val_loss: 0.0086 - val_mae: 0.0689
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1053 - mae: 0.2338
13/13 [==============================] - 0s 3ms/step - loss: 0.1344 - mae: 0.2930

13/13 [==============================] - 0s 25ms/step - loss: 0.1344 - mae: 0.2930 - val_loss: 0.0097 - val_mae: 0.0725
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1634 - mae: 0.3278
13/13 [==============================] - 0s 3ms/step - loss: 0.1530 - mae: 0.3155

13/13 [==============================] - 0s 25ms/step - loss: 0.1530 - mae: 0.3155 - val_loss: 0.0099 - val_mae: 0.0731
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1246 - mae: 0.3067
13/13 [==============================] - 0s 4ms/step - loss: 0.1604 - mae: 0.3256

13/13 [==============================] - 0s 26ms/step - loss: 0.1604 - mae: 0.3256 - val_loss: 0.0088 - val_mae: 0.0690
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1514 - mae: 0.3072
13/13 [==============================] - 0s 3ms/step - loss: 0.1234 - mae: 0.2820

13/13 [==============================] - 0s 22ms/step - loss: 0.1234 - mae: 0.2820 - val_loss: 0.0083 - val_mae: 0.0678
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1565 - mae: 0.3256
13/13 [==============================] - 0s 3ms/step - loss: 0.1462 - mae: 0.3012

13/13 [==============================] - 0s 24ms/step - loss: 0.1462 - mae: 0.3012 - val_loss: 0.0085 - val_mae: 0.0679
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1729 - mae: 0.3094
13/13 [==============================] - 0s 4ms/step - loss: 0.1436 - mae: 0.2993

13/13 [==============================] - 0s 26ms/step - loss: 0.1436 - mae: 0.2993 - val_loss: 0.0082 - val_mae: 0.0673
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0855 - mae: 0.2257
13/13 [==============================] - 0s 3ms/step - loss: 0.1273 - mae: 0.2770

13/13 [==============================] - 0s 23ms/step - loss: 0.1273 - mae: 0.2770 - val_loss: 0.0090 - val_mae: 0.0692
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0948 - mae: 0.2624
13/13 [==============================] - 0s 3ms/step - loss: 0.1341 - mae: 0.2897

13/13 [==============================] - 0s 22ms/step - loss: 0.1341 - mae: 0.2897 - val_loss: 0.0077 - val_mae: 0.0656
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1237 - mae: 0.2778
13/13 [==============================] - 0s 3ms/step - loss: 0.1282 - mae: 0.2821

13/13 [==============================] - 0s 25ms/step - loss: 0.1282 - mae: 0.2821 - val_loss: 0.0083 - val_mae: 0.0670
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0883 - mae: 0.2355
13/13 [==============================] - 0s 3ms/step - loss: 0.1221 - mae: 0.2833

13/13 [==============================] - 0s 26ms/step - loss: 0.1221 - mae: 0.2833 - val_loss: 0.0070 - val_mae: 0.0638
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0815 - mae: 0.2347
13/13 [==============================] - 0s 4ms/step - loss: 0.1485 - mae: 0.3041

13/13 [==============================] - 0s 27ms/step - loss: 0.1485 - mae: 0.3041 - val_loss: 0.0073 - val_mae: 0.0644
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1268 - mae: 0.2739
13/13 [==============================] - 0s 3ms/step - loss: 0.1419 - mae: 0.2952

13/13 [==============================] - 0s 19ms/step - loss: 0.1419 - mae: 0.2952 - val_loss: 0.0085 - val_mae: 0.0667

Run completed: runs/2022-12-07T02-42-18Z

Training run 28/52 (flags = list(64, 32, 0.01, 30, 30, "tanh", "relu", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-42-49Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 9s - loss: 2.1605 - mae: 1.1485
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0035s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 0.8298 - mae: 0.7217

13/13 [==============================] - 2s 65ms/step - loss: 0.8298 - mae: 0.7217 - val_loss: 0.1422 - val_mae: 0.3408
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4736 - mae: 0.5150
13/13 [==============================] - 0s 3ms/step - loss: 0.3564 - mae: 0.4705

13/13 [==============================] - 0s 22ms/step - loss: 0.3564 - mae: 0.4705 - val_loss: 0.0410 - val_mae: 0.1671
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2962 - mae: 0.4375
12/13 [==========================>...] - ETA: 0s - loss: 0.2431 - mae: 0.3843
13/13 [==============================] - 0s 5ms/step - loss: 0.2435 - mae: 0.3850

13/13 [==============================] - 0s 35ms/step - loss: 0.2435 - mae: 0.3850 - val_loss: 0.0315 - val_mae: 0.1461
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1853 - mae: 0.3424
12/13 [==========================>...] - ETA: 0s - loss: 0.2203 - mae: 0.3790
13/13 [==============================] - 0s 5ms/step - loss: 0.2229 - mae: 0.3814

13/13 [==============================] - 0s 35ms/step - loss: 0.2229 - mae: 0.3814 - val_loss: 0.0265 - val_mae: 0.1326
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2011 - mae: 0.3277
13/13 [==============================] - 0s 3ms/step - loss: 0.1916 - mae: 0.3387

13/13 [==============================] - 0s 19ms/step - loss: 0.1916 - mae: 0.3387 - val_loss: 0.0267 - val_mae: 0.1369
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1519 - mae: 0.3252
13/13 [==============================] - 0s 3ms/step - loss: 0.1263 - mae: 0.2845

13/13 [==============================] - 0s 19ms/step - loss: 0.1263 - mae: 0.2845 - val_loss: 0.0192 - val_mae: 0.1127
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2629 - mae: 0.4055
13/13 [==============================] - 0s 3ms/step - loss: 0.1451 - mae: 0.2867

13/13 [==============================] - 0s 20ms/step - loss: 0.1451 - mae: 0.2867 - val_loss: 0.0186 - val_mae: 0.1129
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1176 - mae: 0.2864
13/13 [==============================] - 0s 4ms/step - loss: 0.1245 - mae: 0.2760

13/13 [==============================] - 0s 32ms/step - loss: 0.1245 - mae: 0.2760 - val_loss: 0.0170 - val_mae: 0.1124
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1351 - mae: 0.2741
13/13 [==============================] - 0s 4ms/step - loss: 0.1039 - mae: 0.2547

13/13 [==============================] - 0s 27ms/step - loss: 0.1039 - mae: 0.2547 - val_loss: 0.0141 - val_mae: 0.1014
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1154 - mae: 0.2705
13/13 [==============================] - 0s 3ms/step - loss: 0.0893 - mae: 0.2325

13/13 [==============================] - 0s 18ms/step - loss: 0.0893 - mae: 0.2325 - val_loss: 0.0118 - val_mae: 0.0910
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0752 - mae: 0.2227
13/13 [==============================] - 0s 3ms/step - loss: 0.0926 - mae: 0.2372

13/13 [==============================] - 0s 18ms/step - loss: 0.0926 - mae: 0.2372 - val_loss: 0.0128 - val_mae: 0.0957
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0803 - mae: 0.2253
13/13 [==============================] - 0s 3ms/step - loss: 0.0858 - mae: 0.2344

13/13 [==============================] - 0s 18ms/step - loss: 0.0858 - mae: 0.2344 - val_loss: 0.0128 - val_mae: 0.0968
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1421 - mae: 0.2619
13/13 [==============================] - 0s 3ms/step - loss: 0.0735 - mae: 0.2148

13/13 [==============================] - 0s 19ms/step - loss: 0.0735 - mae: 0.2148 - val_loss: 0.0115 - val_mae: 0.0917
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0689 - mae: 0.2103
13/13 [==============================] - 0s 3ms/step - loss: 0.0705 - mae: 0.2127

13/13 [==============================] - 0s 18ms/step - loss: 0.0705 - mae: 0.2127 - val_loss: 0.0116 - val_mae: 0.0925
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0834 - mae: 0.2180
13/13 [==============================] - 0s 3ms/step - loss: 0.0639 - mae: 0.1960

13/13 [==============================] - 0s 19ms/step - loss: 0.0639 - mae: 0.1960 - val_loss: 0.0117 - val_mae: 0.0943
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0197 - mae: 0.1105
13/13 [==============================] - 0s 3ms/step - loss: 0.0634 - mae: 0.1935

13/13 [==============================] - 0s 19ms/step - loss: 0.0634 - mae: 0.1935 - val_loss: 0.0115 - val_mae: 0.0928
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0365 - mae: 0.1594
13/13 [==============================] - 0s 3ms/step - loss: 0.0530 - mae: 0.1829

13/13 [==============================] - 0s 19ms/step - loss: 0.0530 - mae: 0.1829 - val_loss: 0.0104 - val_mae: 0.0888
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0491 - mae: 0.1856
13/13 [==============================] - 0s 3ms/step - loss: 0.0539 - mae: 0.1861

13/13 [==============================] - 0s 19ms/step - loss: 0.0539 - mae: 0.1861 - val_loss: 0.0094 - val_mae: 0.0849
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0662 - mae: 0.2142
13/13 [==============================] - 0s 3ms/step - loss: 0.0709 - mae: 0.2060

13/13 [==============================] - 0s 20ms/step - loss: 0.0709 - mae: 0.2060 - val_loss: 0.0106 - val_mae: 0.0898
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0718 - mae: 0.2101
13/13 [==============================] - 0s 3ms/step - loss: 0.0492 - mae: 0.1772

13/13 [==============================] - 0s 18ms/step - loss: 0.0492 - mae: 0.1772 - val_loss: 0.0094 - val_mae: 0.0845
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0475 - mae: 0.1730
13/13 [==============================] - 0s 2ms/step - loss: 0.0480 - mae: 0.1731

13/13 [==============================] - 0s 19ms/step - loss: 0.0480 - mae: 0.1731 - val_loss: 0.0107 - val_mae: 0.0897
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0373 - mae: 0.1561
13/13 [==============================] - 0s 3ms/step - loss: 0.0418 - mae: 0.1630

13/13 [==============================] - 0s 19ms/step - loss: 0.0418 - mae: 0.1630 - val_loss: 0.0101 - val_mae: 0.0862
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0426 - mae: 0.1582
13/13 [==============================] - 0s 3ms/step - loss: 0.0474 - mae: 0.1685

13/13 [==============================] - 0s 18ms/step - loss: 0.0474 - mae: 0.1685 - val_loss: 0.0086 - val_mae: 0.0795
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0264 - mae: 0.1253
13/13 [==============================] - 0s 3ms/step - loss: 0.0406 - mae: 0.1583

13/13 [==============================] - 0s 19ms/step - loss: 0.0406 - mae: 0.1583 - val_loss: 0.0087 - val_mae: 0.0805
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0373 - mae: 0.1541
13/13 [==============================] - 0s 3ms/step - loss: 0.0397 - mae: 0.1556

13/13 [==============================] - 0s 19ms/step - loss: 0.0397 - mae: 0.1556 - val_loss: 0.0086 - val_mae: 0.0795
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0324 - mae: 0.1376
13/13 [==============================] - 0s 3ms/step - loss: 0.0359 - mae: 0.1476

13/13 [==============================] - 0s 23ms/step - loss: 0.0359 - mae: 0.1476 - val_loss: 0.0078 - val_mae: 0.0753
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0616 - mae: 0.1908
13/13 [==============================] - 0s 3ms/step - loss: 0.0378 - mae: 0.1503

13/13 [==============================] - 0s 18ms/step - loss: 0.0378 - mae: 0.1503 - val_loss: 0.0081 - val_mae: 0.0771
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0244 - mae: 0.1285
13/13 [==============================] - 0s 3ms/step - loss: 0.0311 - mae: 0.1371

13/13 [==============================] - 0s 18ms/step - loss: 0.0311 - mae: 0.1371 - val_loss: 0.0080 - val_mae: 0.0755
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0192 - mae: 0.1067
13/13 [==============================] - 0s 3ms/step - loss: 0.0344 - mae: 0.1424

13/13 [==============================] - 0s 21ms/step - loss: 0.0344 - mae: 0.1424 - val_loss: 0.0082 - val_mae: 0.0766
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0355 - mae: 0.1477
13/13 [==============================] - 0s 3ms/step - loss: 0.0338 - mae: 0.1419

13/13 [==============================] - 0s 18ms/step - loss: 0.0338 - mae: 0.1419 - val_loss: 0.0081 - val_mae: 0.0769

Run completed: runs/2022-12-07T02-42-49Z

Training run 29/52 (flags = list(32, 10, 0.001, 50, 50, "tanh", "relu", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-43-11Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 3s - loss: 3.1265 - mae: 1.3185
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0011s vs `on_train_batch_end` time: 0.0043s). Check your callbacks.

8/8 [==============================] - 1s 2ms/step - loss: 3.7302 - mae: 1.4324

8/8 [==============================] - 1s 107ms/step - loss: 3.7302 - mae: 1.4324 - val_loss: 1.2103 - val_mae: 1.0263
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 5.1283 - mae: 1.6470
8/8 [==============================] - 0s 2ms/step - loss: 3.5015 - mae: 1.4100

8/8 [==============================] - 0s 29ms/step - loss: 3.5015 - mae: 1.4100 - val_loss: 0.9861 - val_mae: 0.9251
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 2.6536 - mae: 1.2478
8/8 [==============================] - 0s 5ms/step - loss: 2.7985 - mae: 1.3036

8/8 [==============================] - 0s 63ms/step - loss: 2.7985 - mae: 1.3036 - val_loss: 0.8443 - val_mae: 0.8590
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 2.0956 - mae: 1.1102
8/8 [==============================] - 0s 2ms/step - loss: 2.1762 - mae: 1.1617

8/8 [==============================] - 0s 33ms/step - loss: 2.1762 - mae: 1.1617 - val_loss: 0.7145 - val_mae: 0.7866
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 2.0143 - mae: 1.0708
8/8 [==============================] - 0s 2ms/step - loss: 2.5963 - mae: 1.2344

8/8 [==============================] - 0s 36ms/step - loss: 2.5963 - mae: 1.2344 - val_loss: 0.5946 - val_mae: 0.7139
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 2.4574 - mae: 1.1619
8/8 [==============================] - 0s 5ms/step - loss: 1.8630 - mae: 1.0426

8/8 [==============================] - 0s 63ms/step - loss: 1.8630 - mae: 1.0426 - val_loss: 0.5300 - val_mae: 0.6722
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 2.5640 - mae: 1.1957
8/8 [==============================] - 0s 5ms/step - loss: 2.1614 - mae: 1.1550

8/8 [==============================] - 0s 46ms/step - loss: 2.1614 - mae: 1.1550 - val_loss: 0.4594 - val_mae: 0.6257
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 2.2875 - mae: 1.1327
8/8 [==============================] - 0s 3ms/step - loss: 1.9707 - mae: 1.0981

8/8 [==============================] - 0s 28ms/step - loss: 1.9707 - mae: 1.0981 - val_loss: 0.3862 - val_mae: 0.5708
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 1.8643 - mae: 0.9723
8/8 [==============================] - 0s 3ms/step - loss: 1.5171 - mae: 0.9470

8/8 [==============================] - 0s 31ms/step - loss: 1.5171 - mae: 0.9470 - val_loss: 0.3479 - val_mae: 0.5420
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 2.0136 - mae: 1.1164
8/8 [==============================] - 0s 2ms/step - loss: 1.5498 - mae: 0.9842

8/8 [==============================] - 0s 33ms/step - loss: 1.5498 - mae: 0.9842 - val_loss: 0.3148 - val_mae: 0.5155
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 1.6515 - mae: 0.9515
8/8 [==============================] - 0s 3ms/step - loss: 1.6078 - mae: 0.9282

8/8 [==============================] - 0s 28ms/step - loss: 1.6078 - mae: 0.9282 - val_loss: 0.2945 - val_mae: 0.5001
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 1.0949 - mae: 0.8831
8/8 [==============================] - 0s 2ms/step - loss: 1.2732 - mae: 0.8713

8/8 [==============================] - 0s 31ms/step - loss: 1.2732 - mae: 0.8713 - val_loss: 0.2802 - val_mae: 0.4893
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9592 - mae: 0.7622
8/8 [==============================] - 0s 3ms/step - loss: 1.2772 - mae: 0.8764

8/8 [==============================] - 0s 31ms/step - loss: 1.2772 - mae: 0.8764 - val_loss: 0.2655 - val_mae: 0.4765
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 1.1753 - mae: 0.8673
8/8 [==============================] - 0s 3ms/step - loss: 1.1782 - mae: 0.8614

8/8 [==============================] - 0s 31ms/step - loss: 1.1782 - mae: 0.8614 - val_loss: 0.2464 - val_mae: 0.4592
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8881 - mae: 0.7491
8/8 [==============================] - 0s 3ms/step - loss: 1.4041 - mae: 0.8995

8/8 [==============================] - 0s 33ms/step - loss: 1.4041 - mae: 0.8995 - val_loss: 0.2332 - val_mae: 0.4474
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9501 - mae: 0.7714
8/8 [==============================] - 0s 3ms/step - loss: 1.1387 - mae: 0.8389

8/8 [==============================] - 0s 29ms/step - loss: 1.1387 - mae: 0.8389 - val_loss: 0.2261 - val_mae: 0.4419
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7323 - mae: 0.6619
8/8 [==============================] - 0s 3ms/step - loss: 0.9885 - mae: 0.7647

8/8 [==============================] - 0s 31ms/step - loss: 0.9885 - mae: 0.7647 - val_loss: 0.2135 - val_mae: 0.4298
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 1.3340 - mae: 0.8978
8/8 [==============================] - 0s 3ms/step - loss: 1.1544 - mae: 0.8450

8/8 [==============================] - 0s 31ms/step - loss: 1.1544 - mae: 0.8450 - val_loss: 0.2035 - val_mae: 0.4204
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8189 - mae: 0.7139
8/8 [==============================] - 0s 2ms/step - loss: 0.8978 - mae: 0.7504

8/8 [==============================] - 0s 29ms/step - loss: 0.8978 - mae: 0.7504 - val_loss: 0.1944 - val_mae: 0.4108
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 1.1749 - mae: 0.8790
8/8 [==============================] - 0s 3ms/step - loss: 0.9173 - mae: 0.7464

8/8 [==============================] - 0s 31ms/step - loss: 0.9173 - mae: 0.7464 - val_loss: 0.1875 - val_mae: 0.4034
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 1.1649 - mae: 0.8019
8/8 [==============================] - 0s 3ms/step - loss: 0.9633 - mae: 0.7411

8/8 [==============================] - 0s 33ms/step - loss: 0.9633 - mae: 0.7411 - val_loss: 0.1923 - val_mae: 0.4094
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9091 - mae: 0.7564
8/8 [==============================] - 0s 3ms/step - loss: 0.8725 - mae: 0.7203

8/8 [==============================] - 0s 31ms/step - loss: 0.8725 - mae: 0.7203 - val_loss: 0.1866 - val_mae: 0.4033
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8341 - mae: 0.7590
8/8 [==============================] - 0s 2ms/step - loss: 0.9125 - mae: 0.7410

8/8 [==============================] - 0s 31ms/step - loss: 0.9125 - mae: 0.7410 - val_loss: 0.1797 - val_mae: 0.3960
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 1.3935 - mae: 0.9059
8/8 [==============================] - 0s 3ms/step - loss: 0.9371 - mae: 0.7547

8/8 [==============================] - 0s 31ms/step - loss: 0.9371 - mae: 0.7547 - val_loss: 0.1759 - val_mae: 0.3923
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5881 - mae: 0.5639
8/8 [==============================] - 0s 3ms/step - loss: 0.7631 - mae: 0.6755

8/8 [==============================] - 0s 31ms/step - loss: 0.7631 - mae: 0.6755 - val_loss: 0.1703 - val_mae: 0.3864
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4253 - mae: 0.5516
8/8 [==============================] - 0s 3ms/step - loss: 0.6292 - mae: 0.6283

8/8 [==============================] - 0s 29ms/step - loss: 0.6292 - mae: 0.6283 - val_loss: 0.1661 - val_mae: 0.3814
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9796 - mae: 0.7687
8/8 [==============================] - 0s 3ms/step - loss: 0.8539 - mae: 0.7266

8/8 [==============================] - 0s 33ms/step - loss: 0.8539 - mae: 0.7266 - val_loss: 0.1598 - val_mae: 0.3744
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7783 - mae: 0.6752
8/8 [==============================] - 0s 2ms/step - loss: 0.6738 - mae: 0.6315

8/8 [==============================] - 0s 31ms/step - loss: 0.6738 - mae: 0.6315 - val_loss: 0.1533 - val_mae: 0.3666
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 1.2486 - mae: 0.8101
8/8 [==============================] - 0s 3ms/step - loss: 0.8334 - mae: 0.6698

8/8 [==============================] - 0s 31ms/step - loss: 0.8334 - mae: 0.6698 - val_loss: 0.1541 - val_mae: 0.3685
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 1.2149 - mae: 0.7622
8/8 [==============================] - 0s 2ms/step - loss: 0.7422 - mae: 0.6514

8/8 [==============================] - 0s 31ms/step - loss: 0.7422 - mae: 0.6514 - val_loss: 0.1502 - val_mae: 0.3633
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8036 - mae: 0.7373
8/8 [==============================] - 0s 3ms/step - loss: 0.7813 - mae: 0.6629

8/8 [==============================] - 0s 31ms/step - loss: 0.7813 - mae: 0.6629 - val_loss: 0.1503 - val_mae: 0.3637
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5607 - mae: 0.6340
8/8 [==============================] - 0s 3ms/step - loss: 0.5256 - mae: 0.5948

8/8 [==============================] - 0s 31ms/step - loss: 0.5256 - mae: 0.5948 - val_loss: 0.1460 - val_mae: 0.3580
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6881 - mae: 0.5957
8/8 [==============================] - 0s 3ms/step - loss: 0.6901 - mae: 0.6310

8/8 [==============================] - 0s 33ms/step - loss: 0.6901 - mae: 0.6310 - val_loss: 0.1452 - val_mae: 0.3568
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6734 - mae: 0.6563
8/8 [==============================] - 0s 3ms/step - loss: 0.6340 - mae: 0.6292

8/8 [==============================] - 0s 33ms/step - loss: 0.6340 - mae: 0.6292 - val_loss: 0.1415 - val_mae: 0.3524
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6380 - mae: 0.6286
8/8 [==============================] - 0s 3ms/step - loss: 0.5636 - mae: 0.5975

8/8 [==============================] - 0s 31ms/step - loss: 0.5636 - mae: 0.5975 - val_loss: 0.1408 - val_mae: 0.3512
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6581 - mae: 0.6179
8/8 [==============================] - 0s 2ms/step - loss: 0.6119 - mae: 0.6104

8/8 [==============================] - 0s 30ms/step - loss: 0.6119 - mae: 0.6104 - val_loss: 0.1362 - val_mae: 0.3450
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5377 - mae: 0.5769
8/8 [==============================] - 0s 3ms/step - loss: 0.5620 - mae: 0.5874

8/8 [==============================] - 0s 33ms/step - loss: 0.5620 - mae: 0.5874 - val_loss: 0.1359 - val_mae: 0.3452
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3938 - mae: 0.5358
8/8 [==============================] - 0s 2ms/step - loss: 0.5602 - mae: 0.5765

8/8 [==============================] - 0s 29ms/step - loss: 0.5602 - mae: 0.5765 - val_loss: 0.1334 - val_mae: 0.3413
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4843 - mae: 0.5082
8/8 [==============================] - 0s 3ms/step - loss: 0.5900 - mae: 0.5992

8/8 [==============================] - 0s 33ms/step - loss: 0.5900 - mae: 0.5992 - val_loss: 0.1324 - val_mae: 0.3396
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6590 - mae: 0.6498
8/8 [==============================] - 0s 4ms/step - loss: 0.5114 - mae: 0.5545

8/8 [==============================] - 0s 34ms/step - loss: 0.5114 - mae: 0.5545 - val_loss: 0.1316 - val_mae: 0.3379
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4762 - mae: 0.5730
8/8 [==============================] - 0s 2ms/step - loss: 0.4969 - mae: 0.5471

8/8 [==============================] - 0s 31ms/step - loss: 0.4969 - mae: 0.5471 - val_loss: 0.1303 - val_mae: 0.3358
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4026 - mae: 0.5314
8/8 [==============================] - 0s 3ms/step - loss: 0.5571 - mae: 0.5838

8/8 [==============================] - 0s 33ms/step - loss: 0.5571 - mae: 0.5838 - val_loss: 0.1287 - val_mae: 0.3336
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3256 - mae: 0.4637
8/8 [==============================] - 0s 3ms/step - loss: 0.4604 - mae: 0.5363

8/8 [==============================] - 1s 85ms/step - loss: 0.4604 - mae: 0.5363 - val_loss: 0.1285 - val_mae: 0.3332
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4737 - mae: 0.5607
8/8 [==============================] - 0s 2ms/step - loss: 0.4692 - mae: 0.5235

8/8 [==============================] - 0s 29ms/step - loss: 0.4692 - mae: 0.5235 - val_loss: 0.1287 - val_mae: 0.3332
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4498 - mae: 0.5213
8/8 [==============================] - 0s 2ms/step - loss: 0.4918 - mae: 0.5369

8/8 [==============================] - 0s 29ms/step - loss: 0.4918 - mae: 0.5369 - val_loss: 0.1268 - val_mae: 0.3308
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5868 - mae: 0.5873
8/8 [==============================] - 0s 3ms/step - loss: 0.4353 - mae: 0.5203

8/8 [==============================] - 0s 35ms/step - loss: 0.4353 - mae: 0.5203 - val_loss: 0.1247 - val_mae: 0.3271
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5837 - mae: 0.5707
8/8 [==============================] - 0s 3ms/step - loss: 0.4689 - mae: 0.5303

8/8 [==============================] - 0s 30ms/step - loss: 0.4689 - mae: 0.5303 - val_loss: 0.1234 - val_mae: 0.3252
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4073 - mae: 0.4603
8/8 [==============================] - 0s 2ms/step - loss: 0.5197 - mae: 0.5519

8/8 [==============================] - 0s 31ms/step - loss: 0.5197 - mae: 0.5519 - val_loss: 0.1222 - val_mae: 0.3232
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5393 - mae: 0.5347
8/8 [==============================] - 0s 2ms/step - loss: 0.4613 - mae: 0.5092

8/8 [==============================] - 0s 31ms/step - loss: 0.4613 - mae: 0.5092 - val_loss: 0.1226 - val_mae: 0.3231
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3756 - mae: 0.4843
8/8 [==============================] - 0s 3ms/step - loss: 0.4231 - mae: 0.5127

8/8 [==============================] - 0s 33ms/step - loss: 0.4231 - mae: 0.5127 - val_loss: 0.1202 - val_mae: 0.3195

Run completed: runs/2022-12-07T02-43-11Z

Training run 30/52 (flags = list(16, 32, 0.01, 30, 30, "relu", "relu", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-43-37Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 5s - loss: 2.0722 - mae: 1.1365
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0012s vs `on_train_batch_end` time: 0.0028s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 1.0179 - mae: 0.7867

13/13 [==============================] - 1s 65ms/step - loss: 1.0179 - mae: 0.7867 - val_loss: 0.4278 - val_mae: 0.6385
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 1.5492 - mae: 0.8870
13/13 [==============================] - 0s 3ms/step - loss: 0.6369 - mae: 0.6030

13/13 [==============================] - 0s 24ms/step - loss: 0.6369 - mae: 0.6030 - val_loss: 0.2315 - val_mae: 0.4681
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4401 - mae: 0.5460
13/13 [==============================] - 0s 4ms/step - loss: 0.3525 - mae: 0.4744

13/13 [==============================] - 0s 36ms/step - loss: 0.3525 - mae: 0.4744 - val_loss: 0.1426 - val_mae: 0.3625
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4147 - mae: 0.5209
13/13 [==============================] - 0s 3ms/step - loss: 0.2702 - mae: 0.4185

13/13 [==============================] - 0s 18ms/step - loss: 0.2702 - mae: 0.4185 - val_loss: 0.1136 - val_mae: 0.3195
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2138 - mae: 0.4000
13/13 [==============================] - 0s 3ms/step - loss: 0.2359 - mae: 0.3654

13/13 [==============================] - 0s 20ms/step - loss: 0.2359 - mae: 0.3654 - val_loss: 0.0749 - val_mae: 0.2515
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1669 - mae: 0.3638
13/13 [==============================] - 0s 4ms/step - loss: 0.1751 - mae: 0.3345

13/13 [==============================] - 0s 35ms/step - loss: 0.1751 - mae: 0.3345 - val_loss: 0.0697 - val_mae: 0.2415
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1674 - mae: 0.3205
13/13 [==============================] - 0s 3ms/step - loss: 0.1658 - mae: 0.3164

13/13 [==============================] - 0s 22ms/step - loss: 0.1658 - mae: 0.3164 - val_loss: 0.0501 - val_mae: 0.2010
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1595 - mae: 0.2622
13/13 [==============================] - 0s 3ms/step - loss: 0.1369 - mae: 0.2843

13/13 [==============================] - 0s 18ms/step - loss: 0.1369 - mae: 0.2843 - val_loss: 0.0500 - val_mae: 0.2009
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1409 - mae: 0.3062
13/13 [==============================] - 0s 3ms/step - loss: 0.1493 - mae: 0.3011

13/13 [==============================] - 0s 19ms/step - loss: 0.1493 - mae: 0.3011 - val_loss: 0.0531 - val_mae: 0.2066
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1614 - mae: 0.2995
13/13 [==============================] - 0s 3ms/step - loss: 0.1160 - mae: 0.2654

13/13 [==============================] - 0s 19ms/step - loss: 0.1160 - mae: 0.2654 - val_loss: 0.0593 - val_mae: 0.2197
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1617 - mae: 0.3210
13/13 [==============================] - 0s 3ms/step - loss: 0.1011 - mae: 0.2592

13/13 [==============================] - 0s 18ms/step - loss: 0.1011 - mae: 0.2592 - val_loss: 0.0432 - val_mae: 0.1845
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0786 - mae: 0.2172
13/13 [==============================] - 0s 3ms/step - loss: 0.0912 - mae: 0.2371

13/13 [==============================] - 0s 19ms/step - loss: 0.0912 - mae: 0.2371 - val_loss: 0.0436 - val_mae: 0.1865
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1036 - mae: 0.2131
13/13 [==============================] - 0s 3ms/step - loss: 0.0880 - mae: 0.2218

13/13 [==============================] - 0s 18ms/step - loss: 0.0880 - mae: 0.2218 - val_loss: 0.0337 - val_mae: 0.1609
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0955 - mae: 0.2381
13/13 [==============================] - 0s 2ms/step - loss: 0.0905 - mae: 0.2272

13/13 [==============================] - 0s 18ms/step - loss: 0.0905 - mae: 0.2272 - val_loss: 0.0279 - val_mae: 0.1433
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0566 - mae: 0.2027
13/13 [==============================] - 0s 2ms/step - loss: 0.0789 - mae: 0.2199

13/13 [==============================] - 0s 19ms/step - loss: 0.0789 - mae: 0.2199 - val_loss: 0.0287 - val_mae: 0.1462
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0427 - mae: 0.1537
13/13 [==============================] - 0s 3ms/step - loss: 0.0619 - mae: 0.1889

13/13 [==============================] - 0s 19ms/step - loss: 0.0619 - mae: 0.1889 - val_loss: 0.0271 - val_mae: 0.1396
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0413 - mae: 0.1617
13/13 [==============================] - 0s 3ms/step - loss: 0.0520 - mae: 0.1839

13/13 [==============================] - 0s 21ms/step - loss: 0.0520 - mae: 0.1839 - val_loss: 0.0262 - val_mae: 0.1361
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1025 - mae: 0.2310
13/13 [==============================] - 0s 3ms/step - loss: 0.0654 - mae: 0.1953

13/13 [==============================] - 0s 19ms/step - loss: 0.0654 - mae: 0.1953 - val_loss: 0.0265 - val_mae: 0.1377
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0294 - mae: 0.1347
13/13 [==============================] - 0s 2ms/step - loss: 0.0544 - mae: 0.1797

13/13 [==============================] - 0s 19ms/step - loss: 0.0544 - mae: 0.1797 - val_loss: 0.0247 - val_mae: 0.1309
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0336 - mae: 0.1606
13/13 [==============================] - 0s 2ms/step - loss: 0.0483 - mae: 0.1769

13/13 [==============================] - 0s 18ms/step - loss: 0.0483 - mae: 0.1769 - val_loss: 0.0249 - val_mae: 0.1321
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0503 - mae: 0.1932
13/13 [==============================] - 0s 2ms/step - loss: 0.0461 - mae: 0.1706

13/13 [==============================] - 0s 19ms/step - loss: 0.0461 - mae: 0.1706 - val_loss: 0.0229 - val_mae: 0.1244
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0484 - mae: 0.1748
13/13 [==============================] - 0s 2ms/step - loss: 0.0460 - mae: 0.1717

13/13 [==============================] - 0s 20ms/step - loss: 0.0460 - mae: 0.1717 - val_loss: 0.0230 - val_mae: 0.1251
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0346 - mae: 0.1400
13/13 [==============================] - 0s 2ms/step - loss: 0.0423 - mae: 0.1598

13/13 [==============================] - 0s 18ms/step - loss: 0.0423 - mae: 0.1598 - val_loss: 0.0250 - val_mae: 0.1326
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0312 - mae: 0.1394
13/13 [==============================] - 0s 2ms/step - loss: 0.0396 - mae: 0.1587

13/13 [==============================] - 0s 19ms/step - loss: 0.0396 - mae: 0.1587 - val_loss: 0.0223 - val_mae: 0.1216
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0412 - mae: 0.1617
13/13 [==============================] - 0s 3ms/step - loss: 0.0417 - mae: 0.1593

13/13 [==============================] - 0s 20ms/step - loss: 0.0417 - mae: 0.1593 - val_loss: 0.0215 - val_mae: 0.1183
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0381 - mae: 0.1545
13/13 [==============================] - 0s 2ms/step - loss: 0.0389 - mae: 0.1523

13/13 [==============================] - 0s 18ms/step - loss: 0.0389 - mae: 0.1523 - val_loss: 0.0215 - val_mae: 0.1187
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0546 - mae: 0.1778
13/13 [==============================] - 0s 3ms/step - loss: 0.0452 - mae: 0.1647

13/13 [==============================] - 0s 19ms/step - loss: 0.0452 - mae: 0.1647 - val_loss: 0.0205 - val_mae: 0.1143
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0276 - mae: 0.1338
13/13 [==============================] - 0s 3ms/step - loss: 0.0338 - mae: 0.1437

13/13 [==============================] - 0s 18ms/step - loss: 0.0338 - mae: 0.1437 - val_loss: 0.0198 - val_mae: 0.1122
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0276 - mae: 0.1222
13/13 [==============================] - 0s 2ms/step - loss: 0.0306 - mae: 0.1379

13/13 [==============================] - 0s 18ms/step - loss: 0.0306 - mae: 0.1379 - val_loss: 0.0199 - val_mae: 0.1124
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0206 - mae: 0.1127
13/13 [==============================] - 0s 3ms/step - loss: 0.0298 - mae: 0.1357

13/13 [==============================] - 0s 18ms/step - loss: 0.0298 - mae: 0.1357 - val_loss: 0.0190 - val_mae: 0.1086

Run completed: runs/2022-12-07T02-43-37Z

Training run 31/52 (flags = list(64, 10, 0.001, 30, 30, "relu", "sigmoid", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-43-58Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 5s - loss: 2.3215 - mae: 1.2915
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0022s vs `on_train_batch_end` time: 0.0048s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 2.2888 - mae: 1.2326

13/13 [==============================] - 1s 64ms/step - loss: 2.2888 - mae: 1.2326 - val_loss: 0.9223 - val_mae: 0.9447
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 1.8813 - mae: 1.1572
13/13 [==============================] - 0s 3ms/step - loss: 2.0801 - mae: 1.1977

13/13 [==============================] - 0s 22ms/step - loss: 2.0801 - mae: 1.1977 - val_loss: 0.7460 - val_mae: 0.8460
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 1.7439 - mae: 1.0974
13/13 [==============================] - 0s 4ms/step - loss: 1.7054 - mae: 1.0634

13/13 [==============================] - 0s 37ms/step - loss: 1.7054 - mae: 1.0634 - val_loss: 0.5918 - val_mae: 0.7498
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9593 - mae: 0.8412
13/13 [==============================] - 0s 3ms/step - loss: 1.5999 - mae: 1.0088

13/13 [==============================] - 0s 22ms/step - loss: 1.5999 - mae: 1.0088 - val_loss: 0.4797 - val_mae: 0.6705
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 1.9985 - mae: 1.1411
13/13 [==============================] - 0s 3ms/step - loss: 1.4110 - mae: 0.9568

13/13 [==============================] - 0s 20ms/step - loss: 1.4110 - mae: 0.9568 - val_loss: 0.4051 - val_mae: 0.6127
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 1.9459 - mae: 1.2076
13/13 [==============================] - 0s 4ms/step - loss: 1.2743 - mae: 0.9217

13/13 [==============================] - 0s 37ms/step - loss: 1.2743 - mae: 0.9217 - val_loss: 0.3433 - val_mae: 0.5605
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 1.4967 - mae: 0.9319
13/13 [==============================] - 0s 4ms/step - loss: 1.2818 - mae: 0.9245

13/13 [==============================] - 0s 27ms/step - loss: 1.2818 - mae: 0.9245 - val_loss: 0.2827 - val_mae: 0.5041
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 1.0792 - mae: 0.8007
13/13 [==============================] - 0s 3ms/step - loss: 1.1117 - mae: 0.8571

13/13 [==============================] - 0s 19ms/step - loss: 1.1117 - mae: 0.8571 - val_loss: 0.2425 - val_mae: 0.4627
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 1.5168 - mae: 1.0419
13/13 [==============================] - 0s 3ms/step - loss: 1.2690 - mae: 0.9423

13/13 [==============================] - 0s 18ms/step - loss: 1.2690 - mae: 0.9423 - val_loss: 0.1998 - val_mae: 0.4150
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9373 - mae: 0.8387
13/13 [==============================] - 0s 2ms/step - loss: 1.0127 - mae: 0.8111

13/13 [==============================] - 0s 19ms/step - loss: 1.0127 - mae: 0.8111 - val_loss: 0.1779 - val_mae: 0.3893
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9177 - mae: 0.7627
13/13 [==============================] - 0s 2ms/step - loss: 1.0977 - mae: 0.8633

13/13 [==============================] - 0s 18ms/step - loss: 1.0977 - mae: 0.8633 - val_loss: 0.1502 - val_mae: 0.3540
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 1.3468 - mae: 0.9387
13/13 [==============================] - 0s 3ms/step - loss: 1.0897 - mae: 0.8435

13/13 [==============================] - 0s 19ms/step - loss: 1.0897 - mae: 0.8435 - val_loss: 0.1266 - val_mae: 0.3211
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6417 - mae: 0.6495
13/13 [==============================] - 0s 3ms/step - loss: 0.8587 - mae: 0.7424

13/13 [==============================] - 0s 19ms/step - loss: 0.8587 - mae: 0.7424 - val_loss: 0.1097 - val_mae: 0.2965
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9795 - mae: 0.8277
13/13 [==============================] - 0s 3ms/step - loss: 0.9789 - mae: 0.7957

13/13 [==============================] - 0s 19ms/step - loss: 0.9789 - mae: 0.7957 - val_loss: 0.0968 - val_mae: 0.2767
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9733 - mae: 0.7747
13/13 [==============================] - 0s 3ms/step - loss: 0.8156 - mae: 0.7238

13/13 [==============================] - 0s 20ms/step - loss: 0.8156 - mae: 0.7238 - val_loss: 0.0875 - val_mae: 0.2609
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5807 - mae: 0.6616
13/13 [==============================] - 0s 3ms/step - loss: 0.8744 - mae: 0.7581

13/13 [==============================] - 0s 21ms/step - loss: 0.8744 - mae: 0.7581 - val_loss: 0.0793 - val_mae: 0.2467
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9237 - mae: 0.7813
13/13 [==============================] - 0s 3ms/step - loss: 0.7900 - mae: 0.7133

13/13 [==============================] - 0s 19ms/step - loss: 0.7900 - mae: 0.7133 - val_loss: 0.0770 - val_mae: 0.2429
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.8164 - mae: 0.7873
13/13 [==============================] - 0s 3ms/step - loss: 0.7472 - mae: 0.6933

13/13 [==============================] - 0s 19ms/step - loss: 0.7472 - mae: 0.6933 - val_loss: 0.0705 - val_mae: 0.2311
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7852 - mae: 0.7108
13/13 [==============================] - 0s 3ms/step - loss: 0.7667 - mae: 0.6959

13/13 [==============================] - 0s 19ms/step - loss: 0.7667 - mae: 0.6959 - val_loss: 0.0670 - val_mae: 0.2242
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 1.0130 - mae: 0.8972
13/13 [==============================] - 0s 3ms/step - loss: 0.7843 - mae: 0.7210

13/13 [==============================] - 0s 20ms/step - loss: 0.7843 - mae: 0.7210 - val_loss: 0.0664 - val_mae: 0.2237
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6948 - mae: 0.7121
13/13 [==============================] - 0s 3ms/step - loss: 0.6976 - mae: 0.6659

13/13 [==============================] - 0s 18ms/step - loss: 0.6976 - mae: 0.6659 - val_loss: 0.0583 - val_mae: 0.2088
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4939 - mae: 0.5801
13/13 [==============================] - 0s 3ms/step - loss: 0.7234 - mae: 0.7079

13/13 [==============================] - 0s 19ms/step - loss: 0.7234 - mae: 0.7079 - val_loss: 0.0540 - val_mae: 0.2004
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5399 - mae: 0.5531
13/13 [==============================] - 0s 2ms/step - loss: 0.6368 - mae: 0.6440

13/13 [==============================] - 0s 21ms/step - loss: 0.6368 - mae: 0.6440 - val_loss: 0.0507 - val_mae: 0.1937
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.8903 - mae: 0.7758
13/13 [==============================] - 0s 3ms/step - loss: 0.7636 - mae: 0.7101

13/13 [==============================] - 0s 19ms/step - loss: 0.7636 - mae: 0.7101 - val_loss: 0.0526 - val_mae: 0.1980
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6809 - mae: 0.7130
13/13 [==============================] - 0s 3ms/step - loss: 0.8160 - mae: 0.7356

13/13 [==============================] - 0s 19ms/step - loss: 0.8160 - mae: 0.7356 - val_loss: 0.0506 - val_mae: 0.1946
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7431 - mae: 0.6415
13/13 [==============================] - 0s 3ms/step - loss: 0.5994 - mae: 0.6281

13/13 [==============================] - 0s 18ms/step - loss: 0.5994 - mae: 0.6281 - val_loss: 0.0494 - val_mae: 0.1919
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5429 - mae: 0.6276
13/13 [==============================] - 0s 2ms/step - loss: 0.5593 - mae: 0.6050

13/13 [==============================] - 0s 18ms/step - loss: 0.5593 - mae: 0.6050 - val_loss: 0.0465 - val_mae: 0.1859
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4627 - mae: 0.5532
13/13 [==============================] - 0s 3ms/step - loss: 0.6262 - mae: 0.6362

13/13 [==============================] - 0s 20ms/step - loss: 0.6262 - mae: 0.6362 - val_loss: 0.0450 - val_mae: 0.1827
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5887 - mae: 0.6435
13/13 [==============================] - 0s 3ms/step - loss: 0.5788 - mae: 0.6156

13/13 [==============================] - 0s 18ms/step - loss: 0.5788 - mae: 0.6156 - val_loss: 0.0429 - val_mae: 0.1781
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6339 - mae: 0.6768
13/13 [==============================] - 0s 3ms/step - loss: 0.5942 - mae: 0.6373

13/13 [==============================] - 0s 20ms/step - loss: 0.5942 - mae: 0.6373 - val_loss: 0.0398 - val_mae: 0.1705

Run completed: runs/2022-12-07T02-43-58Z

Training run 32/52 (flags = list(16, 50, 0.01, 50, 50, "sigmoid", "sigmoid", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-44-19Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 3s - loss: 0.8497 - mae: 0.8346
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0018s vs `on_train_batch_end` time: 0.0040s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.3375 - mae: 0.4538

8/8 [==============================] - 1s 111ms/step - loss: 0.3375 - mae: 0.4538 - val_loss: 0.0300 - val_mae: 0.1421
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1483 - mae: 0.3002
8/8 [==============================] - 0s 4ms/step - loss: 0.1404 - mae: 0.3021

8/8 [==============================] - 0s 38ms/step - loss: 0.1404 - mae: 0.3021 - val_loss: 0.0231 - val_mae: 0.1209
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1314 - mae: 0.2884
8/8 [==============================] - 0s 6ms/step - loss: 0.1221 - mae: 0.2810

8/8 [==============================] - 0s 59ms/step - loss: 0.1221 - mae: 0.2810 - val_loss: 0.0235 - val_mae: 0.1226
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1457 - mae: 0.3086
8/8 [==============================] - 0s 2ms/step - loss: 0.1352 - mae: 0.2921

8/8 [==============================] - 0s 27ms/step - loss: 0.1352 - mae: 0.2921 - val_loss: 0.0209 - val_mae: 0.1146
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1552 - mae: 0.3342
8/8 [==============================] - 0s 3ms/step - loss: 0.1131 - mae: 0.2714

8/8 [==============================] - 0s 38ms/step - loss: 0.1131 - mae: 0.2714 - val_loss: 0.0199 - val_mae: 0.1105
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0888 - mae: 0.2429
8/8 [==============================] - 0s 4ms/step - loss: 0.1159 - mae: 0.2768

8/8 [==============================] - 0s 62ms/step - loss: 0.1159 - mae: 0.2768 - val_loss: 0.0199 - val_mae: 0.1103
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1098 - mae: 0.2599
8/8 [==============================] - 0s 3ms/step - loss: 0.1172 - mae: 0.2756

8/8 [==============================] - 0s 39ms/step - loss: 0.1172 - mae: 0.2756 - val_loss: 0.0212 - val_mae: 0.1138
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1086 - mae: 0.2594
8/8 [==============================] - 0s 3ms/step - loss: 0.1144 - mae: 0.2706

8/8 [==============================] - 0s 32ms/step - loss: 0.1144 - mae: 0.2706 - val_loss: 0.0193 - val_mae: 0.1098
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1155 - mae: 0.2862
8/8 [==============================] - 0s 2ms/step - loss: 0.1139 - mae: 0.2687

8/8 [==============================] - 0s 28ms/step - loss: 0.1139 - mae: 0.2687 - val_loss: 0.0207 - val_mae: 0.1143
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1577 - mae: 0.3207
8/8 [==============================] - 0s 3ms/step - loss: 0.1159 - mae: 0.2669

8/8 [==============================] - 0s 41ms/step - loss: 0.1159 - mae: 0.2669 - val_loss: 0.0189 - val_mae: 0.1074
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0843 - mae: 0.2342
8/8 [==============================] - 0s 4ms/step - loss: 0.0977 - mae: 0.2477

8/8 [==============================] - 0s 62ms/step - loss: 0.0977 - mae: 0.2477 - val_loss: 0.0214 - val_mae: 0.1170
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1116 - mae: 0.2838
8/8 [==============================] - 0s 3ms/step - loss: 0.1077 - mae: 0.2633

8/8 [==============================] - 0s 39ms/step - loss: 0.1077 - mae: 0.2633 - val_loss: 0.0185 - val_mae: 0.1063
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1021 - mae: 0.2535
8/8 [==============================] - 0s 3ms/step - loss: 0.1038 - mae: 0.2578

8/8 [==============================] - 0s 29ms/step - loss: 0.1038 - mae: 0.2578 - val_loss: 0.0185 - val_mae: 0.1073
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0661 - mae: 0.2028
8/8 [==============================] - 0s 3ms/step - loss: 0.0946 - mae: 0.2478

8/8 [==============================] - 0s 36ms/step - loss: 0.0946 - mae: 0.2478 - val_loss: 0.0206 - val_mae: 0.1146
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1327 - mae: 0.2798
8/8 [==============================] - 0s 3ms/step - loss: 0.1114 - mae: 0.2662

8/8 [==============================] - 0s 31ms/step - loss: 0.1114 - mae: 0.2662 - val_loss: 0.0182 - val_mae: 0.1064
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1218 - mae: 0.2864
8/8 [==============================] - 0s 2ms/step - loss: 0.1028 - mae: 0.2511

8/8 [==============================] - 0s 34ms/step - loss: 0.1028 - mae: 0.2511 - val_loss: 0.0233 - val_mae: 0.1209
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1062 - mae: 0.2668
8/8 [==============================] - 0s 3ms/step - loss: 0.1099 - mae: 0.2681

8/8 [==============================] - 0s 48ms/step - loss: 0.1099 - mae: 0.2681 - val_loss: 0.0194 - val_mae: 0.1082
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1044 - mae: 0.2609
8/8 [==============================] - 0s 5ms/step - loss: 0.1001 - mae: 0.2562

8/8 [==============================] - 0s 62ms/step - loss: 0.1001 - mae: 0.2562 - val_loss: 0.0186 - val_mae: 0.1074
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0759 - mae: 0.2120
8/8 [==============================] - 0s 3ms/step - loss: 0.0833 - mae: 0.2338

8/8 [==============================] - 0s 30ms/step - loss: 0.0833 - mae: 0.2338 - val_loss: 0.0175 - val_mae: 0.1031
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0846 - mae: 0.2401
8/8 [==============================] - 0s 3ms/step - loss: 0.0949 - mae: 0.2425

8/8 [==============================] - 0s 33ms/step - loss: 0.0949 - mae: 0.2425 - val_loss: 0.0175 - val_mae: 0.1043
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0740 - mae: 0.2209
8/8 [==============================] - 0s 3ms/step - loss: 0.0885 - mae: 0.2381

8/8 [==============================] - 0s 33ms/step - loss: 0.0885 - mae: 0.2381 - val_loss: 0.0173 - val_mae: 0.1037
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1124 - mae: 0.2775
8/8 [==============================] - 0s 3ms/step - loss: 0.0894 - mae: 0.2350

8/8 [==============================] - 0s 35ms/step - loss: 0.0894 - mae: 0.2350 - val_loss: 0.0169 - val_mae: 0.1020
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0737 - mae: 0.2293
8/8 [==============================] - 0s 3ms/step - loss: 0.0856 - mae: 0.2363

8/8 [==============================] - 0s 33ms/step - loss: 0.0856 - mae: 0.2363 - val_loss: 0.0171 - val_mae: 0.1017
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0805 - mae: 0.2382
8/8 [==============================] - 0s 3ms/step - loss: 0.0771 - mae: 0.2227

8/8 [==============================] - 0s 29ms/step - loss: 0.0771 - mae: 0.2227 - val_loss: 0.0173 - val_mae: 0.1033
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0631 - mae: 0.1967
8/8 [==============================] - 0s 3ms/step - loss: 0.0784 - mae: 0.2230

8/8 [==============================] - 0s 33ms/step - loss: 0.0784 - mae: 0.2230 - val_loss: 0.0221 - val_mae: 0.1211
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1043 - mae: 0.2668
8/8 [==============================] - 0s 3ms/step - loss: 0.0811 - mae: 0.2227

8/8 [==============================] - 0s 31ms/step - loss: 0.0811 - mae: 0.2227 - val_loss: 0.0172 - val_mae: 0.1029
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0858 - mae: 0.2370
8/8 [==============================] - 0s 3ms/step - loss: 0.0746 - mae: 0.2202

8/8 [==============================] - 0s 33ms/step - loss: 0.0746 - mae: 0.2202 - val_loss: 0.0167 - val_mae: 0.1006
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0636 - mae: 0.2064
8/8 [==============================] - 0s 3ms/step - loss: 0.0745 - mae: 0.2228

8/8 [==============================] - 0s 39ms/step - loss: 0.0745 - mae: 0.2228 - val_loss: 0.0165 - val_mae: 0.1007
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0663 - mae: 0.2131
8/8 [==============================] - 0s 2ms/step - loss: 0.0780 - mae: 0.2278

8/8 [==============================] - 0s 31ms/step - loss: 0.0780 - mae: 0.2278 - val_loss: 0.0164 - val_mae: 0.1001
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0683 - mae: 0.1769
8/8 [==============================] - 0s 3ms/step - loss: 0.0715 - mae: 0.2059

8/8 [==============================] - 0s 31ms/step - loss: 0.0715 - mae: 0.2059 - val_loss: 0.0175 - val_mae: 0.1022
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0650 - mae: 0.1994
8/8 [==============================] - 0s 4ms/step - loss: 0.0697 - mae: 0.2117

8/8 [==============================] - 0s 44ms/step - loss: 0.0697 - mae: 0.2117 - val_loss: 0.0162 - val_mae: 0.0997
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0781 - mae: 0.2232
8/8 [==============================] - 0s 2ms/step - loss: 0.0725 - mae: 0.2172

8/8 [==============================] - 0s 33ms/step - loss: 0.0725 - mae: 0.2172 - val_loss: 0.0163 - val_mae: 0.1004
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0650 - mae: 0.2136
8/8 [==============================] - 0s 3ms/step - loss: 0.0685 - mae: 0.2131

8/8 [==============================] - 0s 31ms/step - loss: 0.0685 - mae: 0.2131 - val_loss: 0.0164 - val_mae: 0.0992
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0610 - mae: 0.1936
8/8 [==============================] - 0s 3ms/step - loss: 0.0764 - mae: 0.2196

8/8 [==============================] - 0s 31ms/step - loss: 0.0764 - mae: 0.2196 - val_loss: 0.0170 - val_mae: 0.1024
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0862 - mae: 0.2382
8/8 [==============================] - 0s 3ms/step - loss: 0.0699 - mae: 0.2097

8/8 [==============================] - 0s 36ms/step - loss: 0.0699 - mae: 0.2097 - val_loss: 0.0170 - val_mae: 0.1006
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0460 - mae: 0.1779
8/8 [==============================] - 0s 3ms/step - loss: 0.0607 - mae: 0.1923

8/8 [==============================] - 0s 31ms/step - loss: 0.0607 - mae: 0.1923 - val_loss: 0.0159 - val_mae: 0.0984
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0892 - mae: 0.2364
8/8 [==============================] - 0s 2ms/step - loss: 0.0724 - mae: 0.2131

8/8 [==============================] - 0s 31ms/step - loss: 0.0724 - mae: 0.2131 - val_loss: 0.0159 - val_mae: 0.0981
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0760 - mae: 0.2084
8/8 [==============================] - 0s 3ms/step - loss: 0.0626 - mae: 0.1943

8/8 [==============================] - 0s 33ms/step - loss: 0.0626 - mae: 0.1943 - val_loss: 0.0163 - val_mae: 0.0998
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0737 - mae: 0.2141
8/8 [==============================] - 0s 3ms/step - loss: 0.0686 - mae: 0.2103

8/8 [==============================] - 0s 31ms/step - loss: 0.0686 - mae: 0.2103 - val_loss: 0.0156 - val_mae: 0.0976
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0874 - mae: 0.2344
8/8 [==============================] - 0s 3ms/step - loss: 0.0751 - mae: 0.2190

8/8 [==============================] - 0s 34ms/step - loss: 0.0751 - mae: 0.2190 - val_loss: 0.0158 - val_mae: 0.0974
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0800 - mae: 0.2288
8/8 [==============================] - 0s 3ms/step - loss: 0.0665 - mae: 0.2051

8/8 [==============================] - 0s 31ms/step - loss: 0.0665 - mae: 0.2051 - val_loss: 0.0155 - val_mae: 0.0970
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0716 - mae: 0.2130
8/8 [==============================] - 0s 3ms/step - loss: 0.0614 - mae: 0.1951

8/8 [==============================] - 0s 31ms/step - loss: 0.0614 - mae: 0.1951 - val_loss: 0.0157 - val_mae: 0.0983
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0662 - mae: 0.2158
8/8 [==============================] - 0s 3ms/step - loss: 0.0609 - mae: 0.1966

8/8 [==============================] - 0s 33ms/step - loss: 0.0609 - mae: 0.1966 - val_loss: 0.0153 - val_mae: 0.0965
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0621 - mae: 0.2009
8/8 [==============================] - 0s 3ms/step - loss: 0.0654 - mae: 0.2046

8/8 [==============================] - 0s 31ms/step - loss: 0.0654 - mae: 0.2046 - val_loss: 0.0155 - val_mae: 0.0976
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0335 - mae: 0.1530
8/8 [==============================] - 0s 2ms/step - loss: 0.0581 - mae: 0.1901

8/8 [==============================] - 0s 33ms/step - loss: 0.0581 - mae: 0.1901 - val_loss: 0.0153 - val_mae: 0.0962
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0857 - mae: 0.2362
8/8 [==============================] - 0s 3ms/step - loss: 0.0642 - mae: 0.2041

8/8 [==============================] - 0s 31ms/step - loss: 0.0642 - mae: 0.2041 - val_loss: 0.0157 - val_mae: 0.0980
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0547 - mae: 0.1960
8/8 [==============================] - 0s 3ms/step - loss: 0.0515 - mae: 0.1840

8/8 [==============================] - 0s 33ms/step - loss: 0.0515 - mae: 0.1840 - val_loss: 0.0152 - val_mae: 0.0960
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0350 - mae: 0.1539
8/8 [==============================] - 0s 3ms/step - loss: 0.0621 - mae: 0.2017

8/8 [==============================] - 0s 33ms/step - loss: 0.0621 - mae: 0.2017 - val_loss: 0.0154 - val_mae: 0.0960
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0474 - mae: 0.1698
8/8 [==============================] - 0s 3ms/step - loss: 0.0581 - mae: 0.1940

8/8 [==============================] - 0s 31ms/step - loss: 0.0581 - mae: 0.1940 - val_loss: 0.0151 - val_mae: 0.0962
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0354 - mae: 0.1446
8/8 [==============================] - 0s 3ms/step - loss: 0.0536 - mae: 0.1838

8/8 [==============================] - 0s 34ms/step - loss: 0.0536 - mae: 0.1838 - val_loss: 0.0158 - val_mae: 0.0970

Run completed: runs/2022-12-07T02-44-19Z

Training run 33/52 (flags = list(16, 32, 0.01, 30, 50, "relu", "relu", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-44-45Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 6s - loss: 0.7516 - mae: 0.7905
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0017s vs `on_train_batch_end` time: 0.0035s). Check your callbacks.

13/13 [==============================] - 1s 3ms/step - loss: 0.4222 - mae: 0.5505

13/13 [==============================] - 1s 79ms/step - loss: 0.4222 - mae: 0.5505 - val_loss: 0.0959 - val_mae: 0.2770
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2324 - mae: 0.4010
13/13 [==============================] - 0s 2ms/step - loss: 0.1482 - mae: 0.3175

13/13 [==============================] - 0s 19ms/step - loss: 0.1482 - mae: 0.3175 - val_loss: 0.0527 - val_mae: 0.1959
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1189 - mae: 0.2940
13/13 [==============================] - 0s 3ms/step - loss: 0.1391 - mae: 0.2963

13/13 [==============================] - 0s 20ms/step - loss: 0.1391 - mae: 0.2963 - val_loss: 0.0441 - val_mae: 0.1808
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1912 - mae: 0.3675
13/13 [==============================] - 0s 3ms/step - loss: 0.1270 - mae: 0.2827

13/13 [==============================] - 0s 19ms/step - loss: 0.1270 - mae: 0.2827 - val_loss: 0.0349 - val_mae: 0.1602
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0968 - mae: 0.2453
13/13 [==============================] - 0s 4ms/step - loss: 0.0901 - mae: 0.2432

13/13 [==============================] - 0s 34ms/step - loss: 0.0901 - mae: 0.2432 - val_loss: 0.0310 - val_mae: 0.1524
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0988 - mae: 0.2712
13/13 [==============================] - 0s 4ms/step - loss: 0.0897 - mae: 0.2376

13/13 [==============================] - 0s 28ms/step - loss: 0.0897 - mae: 0.2376 - val_loss: 0.0279 - val_mae: 0.1379
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1069 - mae: 0.2839
13/13 [==============================] - 0s 3ms/step - loss: 0.0784 - mae: 0.2257

13/13 [==============================] - 0s 18ms/step - loss: 0.0784 - mae: 0.2257 - val_loss: 0.0256 - val_mae: 0.1373
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0605 - mae: 0.2130
13/13 [==============================] - 0s 3ms/step - loss: 0.0751 - mae: 0.2260

13/13 [==============================] - 1s 53ms/step - loss: 0.0751 - mae: 0.2260 - val_loss: 0.0222 - val_mae: 0.1223
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0902 - mae: 0.2530
13/13 [==============================] - 0s 4ms/step - loss: 0.0734 - mae: 0.2146

13/13 [==============================] - 0s 37ms/step - loss: 0.0734 - mae: 0.2146 - val_loss: 0.0199 - val_mae: 0.1200
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0761 - mae: 0.2348
11/13 [========================>.....] - ETA: 0s - loss: 0.0681 - mae: 0.2002
13/13 [==============================] - 0s 6ms/step - loss: 0.0697 - mae: 0.2036

13/13 [==============================] - 0s 40ms/step - loss: 0.0697 - mae: 0.2036 - val_loss: 0.0189 - val_mae: 0.1173
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0577 - mae: 0.2089
13/13 [==============================] - 0s 4ms/step - loss: 0.0558 - mae: 0.1879

13/13 [==============================] - 0s 32ms/step - loss: 0.0558 - mae: 0.1879 - val_loss: 0.0185 - val_mae: 0.1154
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0351 - mae: 0.1556
13/13 [==============================] - 0s 3ms/step - loss: 0.0622 - mae: 0.1941

13/13 [==============================] - 0s 26ms/step - loss: 0.0622 - mae: 0.1941 - val_loss: 0.0168 - val_mae: 0.1101
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0583 - mae: 0.2020
13/13 [==============================] - 0s 4ms/step - loss: 0.0525 - mae: 0.1797

13/13 [==============================] - 0s 35ms/step - loss: 0.0525 - mae: 0.1797 - val_loss: 0.0156 - val_mae: 0.1062
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0585 - mae: 0.1863
12/13 [==========================>...] - ETA: 0s - loss: 0.0486 - mae: 0.1700
13/13 [==============================] - 0s 5ms/step - loss: 0.0487 - mae: 0.1702

13/13 [==============================] - 0s 38ms/step - loss: 0.0487 - mae: 0.1702 - val_loss: 0.0174 - val_mae: 0.1152
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0527 - mae: 0.1808
13/13 [==============================] - 0s 3ms/step - loss: 0.0514 - mae: 0.1772

13/13 [==============================] - 0s 25ms/step - loss: 0.0514 - mae: 0.1772 - val_loss: 0.0145 - val_mae: 0.1030
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0648 - mae: 0.2062
13/13 [==============================] - 0s 3ms/step - loss: 0.0489 - mae: 0.1752

13/13 [==============================] - 0s 33ms/step - loss: 0.0489 - mae: 0.1752 - val_loss: 0.0128 - val_mae: 0.0943
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0566 - mae: 0.1757
13/13 [==============================] - 0s 4ms/step - loss: 0.0451 - mae: 0.1676

13/13 [==============================] - 0s 38ms/step - loss: 0.0451 - mae: 0.1676 - val_loss: 0.0136 - val_mae: 0.0994
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0517 - mae: 0.1585
12/13 [==========================>...] - ETA: 0s - loss: 0.0498 - mae: 0.1676
13/13 [==============================] - 0s 5ms/step - loss: 0.0500 - mae: 0.1682

13/13 [==============================] - 0s 36ms/step - loss: 0.0500 - mae: 0.1682 - val_loss: 0.0118 - val_mae: 0.0895
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0483 - mae: 0.1584
13/13 [==============================] - 0s 4ms/step - loss: 0.0434 - mae: 0.1642

13/13 [==============================] - 0s 37ms/step - loss: 0.0434 - mae: 0.1642 - val_loss: 0.0119 - val_mae: 0.0904
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0402 - mae: 0.1684
12/13 [==========================>...] - ETA: 0s - loss: 0.0405 - mae: 0.1564
13/13 [==============================] - 0s 5ms/step - loss: 0.0404 - mae: 0.1560

13/13 [==============================] - 0s 35ms/step - loss: 0.0404 - mae: 0.1560 - val_loss: 0.0120 - val_mae: 0.0915
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0308 - mae: 0.1405
13/13 [==============================] - 0s 3ms/step - loss: 0.0446 - mae: 0.1659

13/13 [==============================] - 0s 30ms/step - loss: 0.0446 - mae: 0.1659 - val_loss: 0.0117 - val_mae: 0.0901
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0201 - mae: 0.1183
13/13 [==============================] - 0s 4ms/step - loss: 0.0360 - mae: 0.1516

13/13 [==============================] - 0s 38ms/step - loss: 0.0360 - mae: 0.1516 - val_loss: 0.0116 - val_mae: 0.0893
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0320 - mae: 0.1343
13/13 [==============================] - 0s 4ms/step - loss: 0.0424 - mae: 0.1603

13/13 [==============================] - 0s 39ms/step - loss: 0.0424 - mae: 0.1603 - val_loss: 0.0104 - val_mae: 0.0831
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0307 - mae: 0.1446
 9/13 [===================>..........] - ETA: 0s - loss: 0.0433 - mae: 0.1578
13/13 [==============================] - 0s 6ms/step - loss: 0.0412 - mae: 0.1555

13/13 [==============================] - 0s 38ms/step - loss: 0.0412 - mae: 0.1555 - val_loss: 0.0122 - val_mae: 0.0909
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0271 - mae: 0.1325
12/13 [==========================>...] - ETA: 0s - loss: 0.0325 - mae: 0.1429
13/13 [==============================] - 0s 5ms/step - loss: 0.0325 - mae: 0.1431

13/13 [==============================] - 0s 35ms/step - loss: 0.0325 - mae: 0.1431 - val_loss: 0.0112 - val_mae: 0.0864
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0621 - mae: 0.1763
13/13 [==============================] - 0s 4ms/step - loss: 0.0366 - mae: 0.1504

13/13 [==============================] - 0s 34ms/step - loss: 0.0366 - mae: 0.1504 - val_loss: 0.0115 - val_mae: 0.0881
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0285 - mae: 0.1127
13/13 [==============================] - 0s 4ms/step - loss: 0.0339 - mae: 0.1448

13/13 [==============================] - 0s 34ms/step - loss: 0.0339 - mae: 0.1448 - val_loss: 0.0102 - val_mae: 0.0821
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0440 - mae: 0.1669
13/13 [==============================] - 0s 2ms/step - loss: 0.0343 - mae: 0.1441

13/13 [==============================] - 0s 18ms/step - loss: 0.0343 - mae: 0.1441 - val_loss: 0.0099 - val_mae: 0.0814
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0499 - mae: 0.1743
13/13 [==============================] - 0s 3ms/step - loss: 0.0388 - mae: 0.1477

13/13 [==============================] - 0s 19ms/step - loss: 0.0388 - mae: 0.1477 - val_loss: 0.0091 - val_mae: 0.0771
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0449 - mae: 0.1637
13/13 [==============================] - 0s 4ms/step - loss: 0.0334 - mae: 0.1419

13/13 [==============================] - 0s 34ms/step - loss: 0.0334 - mae: 0.1419 - val_loss: 0.0094 - val_mae: 0.0792
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0496 - mae: 0.1553
12/13 [==========================>...] - ETA: 0s - loss: 0.0328 - mae: 0.1405
13/13 [==============================] - 0s 5ms/step - loss: 0.0329 - mae: 0.1409

13/13 [==============================] - 0s 40ms/step - loss: 0.0329 - mae: 0.1409 - val_loss: 0.0089 - val_mae: 0.0774
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0283 - mae: 0.1319
11/13 [========================>.....] - ETA: 0s - loss: 0.0297 - mae: 0.1356
13/13 [==============================] - 0s 5ms/step - loss: 0.0296 - mae: 0.1344

13/13 [==============================] - 0s 37ms/step - loss: 0.0296 - mae: 0.1344 - val_loss: 0.0084 - val_mae: 0.0742
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0494 - mae: 0.1759
13/13 [==============================] - 0s 3ms/step - loss: 0.0314 - mae: 0.1392

13/13 [==============================] - 0s 30ms/step - loss: 0.0314 - mae: 0.1392 - val_loss: 0.0084 - val_mae: 0.0746
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0362 - mae: 0.1495
13/13 [==============================] - 0s 2ms/step - loss: 0.0271 - mae: 0.1270

13/13 [==============================] - 0s 18ms/step - loss: 0.0271 - mae: 0.1270 - val_loss: 0.0085 - val_mae: 0.0739
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0331 - mae: 0.1364
13/13 [==============================] - 0s 3ms/step - loss: 0.0262 - mae: 0.1282

13/13 [==============================] - 0s 24ms/step - loss: 0.0262 - mae: 0.1282 - val_loss: 0.0085 - val_mae: 0.0756
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0351 - mae: 0.1367
12/13 [==========================>...] - ETA: 0s - loss: 0.0313 - mae: 0.1357
13/13 [==============================] - 0s 5ms/step - loss: 0.0312 - mae: 0.1354

13/13 [==============================] - 0s 38ms/step - loss: 0.0312 - mae: 0.1354 - val_loss: 0.0084 - val_mae: 0.0752
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0255 - mae: 0.1320
13/13 [==============================] - 0s 3ms/step - loss: 0.0285 - mae: 0.1321

13/13 [==============================] - 0s 23ms/step - loss: 0.0285 - mae: 0.1321 - val_loss: 0.0083 - val_mae: 0.0747
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0325 - mae: 0.1335
13/13 [==============================] - 0s 4ms/step - loss: 0.0289 - mae: 0.1319

13/13 [==============================] - 0s 25ms/step - loss: 0.0289 - mae: 0.1319 - val_loss: 0.0087 - val_mae: 0.0780
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0259 - mae: 0.1266
11/13 [========================>.....] - ETA: 0s - loss: 0.0257 - mae: 0.1214
13/13 [==============================] - 0s 5ms/step - loss: 0.0250 - mae: 0.1210

13/13 [==============================] - 0s 39ms/step - loss: 0.0250 - mae: 0.1210 - val_loss: 0.0084 - val_mae: 0.0760
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0305 - mae: 0.1428
13/13 [==============================] - 0s 2ms/step - loss: 0.0246 - mae: 0.1218

13/13 [==============================] - 0s 18ms/step - loss: 0.0246 - mae: 0.1218 - val_loss: 0.0078 - val_mae: 0.0729
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0262 - mae: 0.1222
13/13 [==============================] - 0s 4ms/step - loss: 0.0243 - mae: 0.1210

13/13 [==============================] - 0s 27ms/step - loss: 0.0243 - mae: 0.1210 - val_loss: 0.0078 - val_mae: 0.0732
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0193 - mae: 0.1216
12/13 [==========================>...] - ETA: 0s - loss: 0.0217 - mae: 0.1147
13/13 [==============================] - 0s 5ms/step - loss: 0.0216 - mae: 0.1145

13/13 [==============================] - 0s 38ms/step - loss: 0.0216 - mae: 0.1145 - val_loss: 0.0079 - val_mae: 0.0737
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0424 - mae: 0.1671
13/13 [==============================] - 0s 2ms/step - loss: 0.0256 - mae: 0.1252

13/13 [==============================] - 0s 18ms/step - loss: 0.0256 - mae: 0.1252 - val_loss: 0.0099 - val_mae: 0.0848
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0162 - mae: 0.1126
13/13 [==============================] - 0s 3ms/step - loss: 0.0234 - mae: 0.1184

13/13 [==============================] - 0s 24ms/step - loss: 0.0234 - mae: 0.1184 - val_loss: 0.0084 - val_mae: 0.0772
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0350 - mae: 0.1422
13/13 [==============================] - 0s 3ms/step - loss: 0.0242 - mae: 0.1206

13/13 [==============================] - 0s 28ms/step - loss: 0.0242 - mae: 0.1206 - val_loss: 0.0083 - val_mae: 0.0770
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0397 - mae: 0.1493
13/13 [==============================] - 0s 4ms/step - loss: 0.0229 - mae: 0.1201

13/13 [==============================] - 0s 32ms/step - loss: 0.0229 - mae: 0.1201 - val_loss: 0.0076 - val_mae: 0.0732
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0269 - mae: 0.1241
13/13 [==============================] - 0s 4ms/step - loss: 0.0229 - mae: 0.1172

13/13 [==============================] - 0s 34ms/step - loss: 0.0229 - mae: 0.1172 - val_loss: 0.0075 - val_mae: 0.0727
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0254 - mae: 0.1340
13/13 [==============================] - 0s 3ms/step - loss: 0.0197 - mae: 0.1133

13/13 [==============================] - 0s 21ms/step - loss: 0.0197 - mae: 0.1133 - val_loss: 0.0077 - val_mae: 0.0736
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0264 - mae: 0.1262
13/13 [==============================] - 0s 4ms/step - loss: 0.0239 - mae: 0.1221

13/13 [==============================] - 0s 37ms/step - loss: 0.0239 - mae: 0.1221 - val_loss: 0.0071 - val_mae: 0.0704
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0172 - mae: 0.1066
13/13 [==============================] - 0s 4ms/step - loss: 0.0204 - mae: 0.1121

13/13 [==============================] - 0s 34ms/step - loss: 0.0204 - mae: 0.1121 - val_loss: 0.0069 - val_mae: 0.0684

Run completed: runs/2022-12-07T02-44-45Z

Training run 34/52 (flags = list(64, 32, 0.001, 50, 30, "sigmoid", "tanh", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-45-19Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 3s - loss: 0.5049 - mae: 0.5960
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0020s vs `on_train_batch_end` time: 0.0033s). Check your callbacks.

8/8 [==============================] - 1s 3ms/step - loss: 0.4142 - mae: 0.5221

8/8 [==============================] - 1s 113ms/step - loss: 0.4142 - mae: 0.5221 - val_loss: 0.1244 - val_mae: 0.2870
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2292 - mae: 0.3888
8/8 [==============================] - 0s 3ms/step - loss: 0.2969 - mae: 0.4344

8/8 [==============================] - 0s 32ms/step - loss: 0.2969 - mae: 0.4344 - val_loss: 0.0789 - val_mae: 0.2195
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2821 - mae: 0.4170
8/8 [==============================] - 0s 3ms/step - loss: 0.2550 - mae: 0.3971

8/8 [==============================] - 0s 32ms/step - loss: 0.2550 - mae: 0.3971 - val_loss: 0.0599 - val_mae: 0.1913
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2443 - mae: 0.3978
8/8 [==============================] - 0s 3ms/step - loss: 0.2646 - mae: 0.4229

8/8 [==============================] - 0s 39ms/step - loss: 0.2646 - mae: 0.4229 - val_loss: 0.0526 - val_mae: 0.1836
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3016 - mae: 0.4540
8/8 [==============================] - 0s 5ms/step - loss: 0.2768 - mae: 0.4224

8/8 [==============================] - 0s 61ms/step - loss: 0.2768 - mae: 0.4224 - val_loss: 0.0487 - val_mae: 0.1789
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3011 - mae: 0.4629
8/8 [==============================] - 0s 5ms/step - loss: 0.2577 - mae: 0.4094

8/8 [==============================] - 0s 52ms/step - loss: 0.2577 - mae: 0.4094 - val_loss: 0.0470 - val_mae: 0.1762
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2844 - mae: 0.4181
8/8 [==============================] - 0s 2ms/step - loss: 0.2259 - mae: 0.3846

8/8 [==============================] - 0s 30ms/step - loss: 0.2259 - mae: 0.3846 - val_loss: 0.0455 - val_mae: 0.1743
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2816 - mae: 0.4270
8/8 [==============================] - 0s 3ms/step - loss: 0.2251 - mae: 0.3791

8/8 [==============================] - 0s 35ms/step - loss: 0.2251 - mae: 0.3791 - val_loss: 0.0440 - val_mae: 0.1725
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2358 - mae: 0.3691
8/8 [==============================] - 0s 7ms/step - loss: 0.2159 - mae: 0.3736

8/8 [==============================] - 0s 35ms/step - loss: 0.2159 - mae: 0.3736 - val_loss: 0.0427 - val_mae: 0.1698
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1566 - mae: 0.3238
8/8 [==============================] - 0s 3ms/step - loss: 0.2301 - mae: 0.3931

8/8 [==============================] - 0s 33ms/step - loss: 0.2301 - mae: 0.3931 - val_loss: 0.0416 - val_mae: 0.1690
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2287 - mae: 0.4092
8/8 [==============================] - 0s 5ms/step - loss: 0.2681 - mae: 0.4220

8/8 [==============================] - 0s 69ms/step - loss: 0.2681 - mae: 0.4220 - val_loss: 0.0399 - val_mae: 0.1644
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2795 - mae: 0.4348
8/8 [==============================] - 0s 3ms/step - loss: 0.2415 - mae: 0.3920

8/8 [==============================] - 0s 43ms/step - loss: 0.2415 - mae: 0.3920 - val_loss: 0.0394 - val_mae: 0.1659
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2171 - mae: 0.3936
8/8 [==============================] - 0s 3ms/step - loss: 0.2889 - mae: 0.4259

8/8 [==============================] - 0s 30ms/step - loss: 0.2889 - mae: 0.4259 - val_loss: 0.0382 - val_mae: 0.1638
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3033 - mae: 0.4484
8/8 [==============================] - 0s 3ms/step - loss: 0.2520 - mae: 0.3934

8/8 [==============================] - 0s 38ms/step - loss: 0.2520 - mae: 0.3934 - val_loss: 0.0367 - val_mae: 0.1602
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1588 - mae: 0.3264
8/8 [==============================] - 0s 3ms/step - loss: 0.2328 - mae: 0.3869

8/8 [==============================] - 0s 33ms/step - loss: 0.2328 - mae: 0.3869 - val_loss: 0.0353 - val_mae: 0.1566
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2967 - mae: 0.4115
8/8 [==============================] - 0s 3ms/step - loss: 0.2384 - mae: 0.3881

8/8 [==============================] - 0s 33ms/step - loss: 0.2384 - mae: 0.3881 - val_loss: 0.0355 - val_mae: 0.1577
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2012 - mae: 0.3611
8/8 [==============================] - 0s 3ms/step - loss: 0.2318 - mae: 0.3867

8/8 [==============================] - 0s 38ms/step - loss: 0.2318 - mae: 0.3867 - val_loss: 0.0343 - val_mae: 0.1546
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1645 - mae: 0.3368
8/8 [==============================] - 0s 3ms/step - loss: 0.2038 - mae: 0.3611

8/8 [==============================] - 0s 36ms/step - loss: 0.2038 - mae: 0.3611 - val_loss: 0.0322 - val_mae: 0.1486
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1394 - mae: 0.3036
8/8 [==============================] - 0s 3ms/step - loss: 0.2280 - mae: 0.3784

8/8 [==============================] - 0s 33ms/step - loss: 0.2280 - mae: 0.3784 - val_loss: 0.0320 - val_mae: 0.1488
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2715 - mae: 0.3997
8/8 [==============================] - 0s 3ms/step - loss: 0.2301 - mae: 0.3831

8/8 [==============================] - 0s 38ms/step - loss: 0.2301 - mae: 0.3831 - val_loss: 0.0326 - val_mae: 0.1501
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3263 - mae: 0.4517
8/8 [==============================] - 0s 3ms/step - loss: 0.2250 - mae: 0.3794

8/8 [==============================] - 0s 31ms/step - loss: 0.2250 - mae: 0.3794 - val_loss: 0.0306 - val_mae: 0.1450
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2298 - mae: 0.3807
8/8 [==============================] - 0s 3ms/step - loss: 0.2034 - mae: 0.3631

8/8 [==============================] - 0s 36ms/step - loss: 0.2034 - mae: 0.3631 - val_loss: 0.0293 - val_mae: 0.1414
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2002 - mae: 0.3475
8/8 [==============================] - 0s 3ms/step - loss: 0.2166 - mae: 0.3687

8/8 [==============================] - 0s 33ms/step - loss: 0.2166 - mae: 0.3687 - val_loss: 0.0289 - val_mae: 0.1406
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1737 - mae: 0.3370
8/8 [==============================] - 0s 3ms/step - loss: 0.2008 - mae: 0.3598

8/8 [==============================] - 0s 33ms/step - loss: 0.2008 - mae: 0.3598 - val_loss: 0.0273 - val_mae: 0.1355
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2070 - mae: 0.3709
8/8 [==============================] - 0s 3ms/step - loss: 0.2301 - mae: 0.3817

8/8 [==============================] - 0s 36ms/step - loss: 0.2301 - mae: 0.3817 - val_loss: 0.0273 - val_mae: 0.1358
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1888 - mae: 0.3567
8/8 [==============================] - 0s 3ms/step - loss: 0.2406 - mae: 0.3933

8/8 [==============================] - 0s 31ms/step - loss: 0.2406 - mae: 0.3933 - val_loss: 0.0258 - val_mae: 0.1315
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2594 - mae: 0.3958
8/8 [==============================] - 0s 4ms/step - loss: 0.2237 - mae: 0.3797

8/8 [==============================] - 0s 38ms/step - loss: 0.2237 - mae: 0.3797 - val_loss: 0.0254 - val_mae: 0.1303
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2224 - mae: 0.3586
8/8 [==============================] - 0s 3ms/step - loss: 0.2071 - mae: 0.3624

8/8 [==============================] - 0s 31ms/step - loss: 0.2071 - mae: 0.3624 - val_loss: 0.0248 - val_mae: 0.1285
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1821 - mae: 0.3569
8/8 [==============================] - 0s 3ms/step - loss: 0.2099 - mae: 0.3698

8/8 [==============================] - 0s 36ms/step - loss: 0.2099 - mae: 0.3698 - val_loss: 0.0238 - val_mae: 0.1252
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1706 - mae: 0.3099
8/8 [==============================] - 0s 3ms/step - loss: 0.2021 - mae: 0.3524

8/8 [==============================] - 0s 35ms/step - loss: 0.2021 - mae: 0.3524 - val_loss: 0.0231 - val_mae: 0.1226

Run completed: runs/2022-12-07T02-45-19Z

Training run 35/52 (flags = list(32, 50, 0.001, 30, 50, "relu", "tanh", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-45-43Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 9s - loss: 1.7201 - mae: 1.0828
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0018s vs `on_train_batch_end` time: 0.0032s). Check your callbacks.

13/13 [==============================] - 1s 3ms/step - loss: 1.3212 - mae: 0.9520

13/13 [==============================] - 2s 71ms/step - loss: 1.3212 - mae: 0.9520 - val_loss: 0.4646 - val_mae: 0.6535
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.9231 - mae: 0.7978
13/13 [==============================] - 0s 3ms/step - loss: 1.0867 - mae: 0.8580

13/13 [==============================] - 0s 19ms/step - loss: 1.0867 - mae: 0.8580 - val_loss: 0.3480 - val_mae: 0.5575
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0344 - mae: 0.8342
13/13 [==============================] - 0s 3ms/step - loss: 1.0399 - mae: 0.8302

13/13 [==============================] - 0s 19ms/step - loss: 1.0399 - mae: 0.8302 - val_loss: 0.2488 - val_mae: 0.4594
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.8324 - mae: 0.7325
13/13 [==============================] - 0s 3ms/step - loss: 0.7919 - mae: 0.7208

13/13 [==============================] - 0s 22ms/step - loss: 0.7919 - mae: 0.7208 - val_loss: 0.2037 - val_mae: 0.4080
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7250 - mae: 0.6796
13/13 [==============================] - 0s 4ms/step - loss: 0.8374 - mae: 0.7391

13/13 [==============================] - 0s 22ms/step - loss: 0.8374 - mae: 0.7391 - val_loss: 0.1681 - val_mae: 0.3663
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.9303 - mae: 0.8041
13/13 [==============================] - 0s 3ms/step - loss: 0.6860 - mae: 0.6549

13/13 [==============================] - 0s 18ms/step - loss: 0.6860 - mae: 0.6549 - val_loss: 0.1357 - val_mae: 0.3257
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.8079 - mae: 0.7128
13/13 [==============================] - 0s 3ms/step - loss: 0.8190 - mae: 0.7151

13/13 [==============================] - 0s 19ms/step - loss: 0.8190 - mae: 0.7151 - val_loss: 0.1141 - val_mae: 0.2985
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6198 - mae: 0.6167
13/13 [==============================] - 0s 3ms/step - loss: 0.6516 - mae: 0.6499

13/13 [==============================] - 0s 20ms/step - loss: 0.6516 - mae: 0.6499 - val_loss: 0.0962 - val_mae: 0.2742
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5509 - mae: 0.5972
11/13 [========================>.....] - ETA: 0s - loss: 0.6239 - mae: 0.6261
13/13 [==============================] - 0s 5ms/step - loss: 0.6261 - mae: 0.6321

13/13 [==============================] - 0s 32ms/step - loss: 0.6261 - mae: 0.6321 - val_loss: 0.0802 - val_mae: 0.2468
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4092 - mae: 0.5170
13/13 [==============================] - 0s 4ms/step - loss: 0.5804 - mae: 0.5944

13/13 [==============================] - 0s 29ms/step - loss: 0.5804 - mae: 0.5944 - val_loss: 0.0714 - val_mae: 0.2300
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7554 - mae: 0.6638
13/13 [==============================] - 0s 3ms/step - loss: 0.5629 - mae: 0.5905

13/13 [==============================] - 0s 20ms/step - loss: 0.5629 - mae: 0.5905 - val_loss: 0.0658 - val_mae: 0.2184
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5480 - mae: 0.5857
13/13 [==============================] - 0s 3ms/step - loss: 0.6912 - mae: 0.6609

13/13 [==============================] - 0s 21ms/step - loss: 0.6912 - mae: 0.6609 - val_loss: 0.0626 - val_mae: 0.2134
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7569 - mae: 0.6702
13/13 [==============================] - 0s 3ms/step - loss: 0.6408 - mae: 0.6209

13/13 [==============================] - 0s 18ms/step - loss: 0.6408 - mae: 0.6209 - val_loss: 0.0606 - val_mae: 0.2099
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6748 - mae: 0.5806
13/13 [==============================] - 0s 3ms/step - loss: 0.5826 - mae: 0.5856

13/13 [==============================] - 0s 22ms/step - loss: 0.5826 - mae: 0.5856 - val_loss: 0.0571 - val_mae: 0.2020
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5916 - mae: 0.6250
13/13 [==============================] - 0s 3ms/step - loss: 0.4569 - mae: 0.5340

13/13 [==============================] - 230s 19s/step - loss: 0.4569 - mae: 0.5340 - val_loss: 0.0531 - val_mae: 0.1921
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7525 - mae: 0.6442
12/13 [==========================>...] - ETA: 0s - loss: 0.5902 - mae: 0.5787
13/13 [==============================] - 0s 5ms/step - loss: 0.5918 - mae: 0.5797

13/13 [==============================] - 0s 34ms/step - loss: 0.5918 - mae: 0.5797 - val_loss: 0.0493 - val_mae: 0.1843
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3127 - mae: 0.4304
10/13 [======================>.......] - ETA: 0s - loss: 0.4952 - mae: 0.5622
13/13 [==============================] - 0s 6ms/step - loss: 0.5033 - mae: 0.5620

13/13 [==============================] - 1s 48ms/step - loss: 0.5033 - mae: 0.5620 - val_loss: 0.0458 - val_mae: 0.1749
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7037 - mae: 0.6092
11/13 [========================>.....] - ETA: 0s - loss: 0.4736 - mae: 0.5280
13/13 [==============================] - 0s 5ms/step - loss: 0.4561 - mae: 0.5168

13/13 [==============================] - 0s 31ms/step - loss: 0.4561 - mae: 0.5168 - val_loss: 0.0441 - val_mae: 0.1723
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5144 - mae: 0.5937
13/13 [==============================] - 0s 2ms/step - loss: 0.5156 - mae: 0.5510

13/13 [==============================] - 0s 17ms/step - loss: 0.5156 - mae: 0.5510 - val_loss: 0.0416 - val_mae: 0.1679
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4040 - mae: 0.5144
13/13 [==============================] - 0s 4ms/step - loss: 0.5141 - mae: 0.5575

13/13 [==============================] - 0s 20ms/step - loss: 0.5141 - mae: 0.5575 - val_loss: 0.0395 - val_mae: 0.1631
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2939 - mae: 0.4188
13/13 [==============================] - 0s 3ms/step - loss: 0.4207 - mae: 0.4985

13/13 [==============================] - 0s 32ms/step - loss: 0.4207 - mae: 0.4985 - val_loss: 0.0378 - val_mae: 0.1596
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4100 - mae: 0.5263
13/13 [==============================] - 0s 2ms/step - loss: 0.5119 - mae: 0.5477

13/13 [==============================] - 0s 25ms/step - loss: 0.5119 - mae: 0.5477 - val_loss: 0.0361 - val_mae: 0.1567
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4946 - mae: 0.5707
11/13 [========================>.....] - ETA: 0s - loss: 0.4593 - mae: 0.5238
13/13 [==============================] - 0s 5ms/step - loss: 0.4545 - mae: 0.5239

13/13 [==============================] - 0s 22ms/step - loss: 0.4545 - mae: 0.5239 - val_loss: 0.0348 - val_mae: 0.1547
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3691 - mae: 0.4942
13/13 [==============================] - 0s 2ms/step - loss: 0.3731 - mae: 0.4749

13/13 [==============================] - 0s 18ms/step - loss: 0.3731 - mae: 0.4749 - val_loss: 0.0336 - val_mae: 0.1521
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3781 - mae: 0.4015
13/13 [==============================] - 0s 3ms/step - loss: 0.4601 - mae: 0.5194

13/13 [==============================] - 0s 34ms/step - loss: 0.4601 - mae: 0.5194 - val_loss: 0.0325 - val_mae: 0.1492
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5059 - mae: 0.6265
13/13 [==============================] - 0s 2ms/step - loss: 0.4428 - mae: 0.5292

13/13 [==============================] - 0s 19ms/step - loss: 0.4428 - mae: 0.5292 - val_loss: 0.0314 - val_mae: 0.1463
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.8704 - mae: 0.6886
13/13 [==============================] - 0s 2ms/step - loss: 0.4326 - mae: 0.5066

13/13 [==============================] - 0s 16ms/step - loss: 0.4326 - mae: 0.5066 - val_loss: 0.0308 - val_mae: 0.1439
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4861 - mae: 0.5833
13/13 [==============================] - 0s 2ms/step - loss: 0.4074 - mae: 0.4934

13/13 [==============================] - 0s 17ms/step - loss: 0.4074 - mae: 0.4934 - val_loss: 0.0295 - val_mae: 0.1407
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2919 - mae: 0.4374
13/13 [==============================] - 0s 2ms/step - loss: 0.4174 - mae: 0.4994

13/13 [==============================] - 0s 15ms/step - loss: 0.4174 - mae: 0.4994 - val_loss: 0.0286 - val_mae: 0.1370
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3778 - mae: 0.5128
13/13 [==============================] - 0s 2ms/step - loss: 0.3697 - mae: 0.4880

13/13 [==============================] - 0s 16ms/step - loss: 0.3697 - mae: 0.4880 - val_loss: 0.0276 - val_mae: 0.1350
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3877 - mae: 0.4706
13/13 [==============================] - 0s 2ms/step - loss: 0.4162 - mae: 0.5001

13/13 [==============================] - 0s 16ms/step - loss: 0.4162 - mae: 0.5001 - val_loss: 0.0268 - val_mae: 0.1329
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2869 - mae: 0.4172
13/13 [==============================] - 0s 2ms/step - loss: 0.3677 - mae: 0.4742

13/13 [==============================] - 0s 19ms/step - loss: 0.3677 - mae: 0.4742 - val_loss: 0.0262 - val_mae: 0.1315
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4404 - mae: 0.5019
13/13 [==============================] - 0s 2ms/step - loss: 0.3412 - mae: 0.4520

13/13 [==============================] - 0s 17ms/step - loss: 0.3412 - mae: 0.4520 - val_loss: 0.0260 - val_mae: 0.1300
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2196 - mae: 0.3867
13/13 [==============================] - 0s 2ms/step - loss: 0.3216 - mae: 0.4398

13/13 [==============================] - 0s 17ms/step - loss: 0.3216 - mae: 0.4398 - val_loss: 0.0252 - val_mae: 0.1272
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3057 - mae: 0.4229
13/13 [==============================] - 0s 2ms/step - loss: 0.3162 - mae: 0.4310

13/13 [==============================] - 0s 17ms/step - loss: 0.3162 - mae: 0.4310 - val_loss: 0.0246 - val_mae: 0.1244
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3080 - mae: 0.4166
13/13 [==============================] - 0s 3ms/step - loss: 0.3013 - mae: 0.4289

13/13 [==============================] - 0s 18ms/step - loss: 0.3013 - mae: 0.4289 - val_loss: 0.0243 - val_mae: 0.1223
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2639 - mae: 0.4153
13/13 [==============================] - 0s 2ms/step - loss: 0.3036 - mae: 0.4236

13/13 [==============================] - 0s 17ms/step - loss: 0.3036 - mae: 0.4236 - val_loss: 0.0240 - val_mae: 0.1202
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2865 - mae: 0.4187
13/13 [==============================] - 0s 2ms/step - loss: 0.2877 - mae: 0.4039

13/13 [==============================] - 0s 17ms/step - loss: 0.2877 - mae: 0.4039 - val_loss: 0.0233 - val_mae: 0.1204
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3534 - mae: 0.4584
13/13 [==============================] - 0s 3ms/step - loss: 0.2869 - mae: 0.4160

13/13 [==============================] - 0s 17ms/step - loss: 0.2869 - mae: 0.4160 - val_loss: 0.0229 - val_mae: 0.1188
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2767 - mae: 0.3874
13/13 [==============================] - 0s 2ms/step - loss: 0.2793 - mae: 0.4045

13/13 [==============================] - 0s 17ms/step - loss: 0.2793 - mae: 0.4045 - val_loss: 0.0222 - val_mae: 0.1177
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3559 - mae: 0.4542
13/13 [==============================] - 0s 2ms/step - loss: 0.3062 - mae: 0.4324

13/13 [==============================] - 0s 17ms/step - loss: 0.3062 - mae: 0.4324 - val_loss: 0.0222 - val_mae: 0.1161
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2526 - mae: 0.4075
13/13 [==============================] - 0s 2ms/step - loss: 0.3311 - mae: 0.4461

13/13 [==============================] - 0s 18ms/step - loss: 0.3311 - mae: 0.4461 - val_loss: 0.0219 - val_mae: 0.1141
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3049 - mae: 0.4532
13/13 [==============================] - 0s 3ms/step - loss: 0.3080 - mae: 0.4395

13/13 [==============================] - 0s 18ms/step - loss: 0.3080 - mae: 0.4395 - val_loss: 0.0216 - val_mae: 0.1135
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2471 - mae: 0.3747
13/13 [==============================] - 0s 3ms/step - loss: 0.2630 - mae: 0.3894

13/13 [==============================] - 0s 17ms/step - loss: 0.2630 - mae: 0.3894 - val_loss: 0.0213 - val_mae: 0.1122
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3051 - mae: 0.4078
13/13 [==============================] - 0s 2ms/step - loss: 0.3092 - mae: 0.4258

13/13 [==============================] - 0s 17ms/step - loss: 0.3092 - mae: 0.4258 - val_loss: 0.0206 - val_mae: 0.1106
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3945 - mae: 0.4875
13/13 [==============================] - 0s 2ms/step - loss: 0.3211 - mae: 0.4246

13/13 [==============================] - 0s 19ms/step - loss: 0.3211 - mae: 0.4246 - val_loss: 0.0201 - val_mae: 0.1106
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2183 - mae: 0.3831
13/13 [==============================] - 0s 2ms/step - loss: 0.2627 - mae: 0.3988

13/13 [==============================] - 0s 17ms/step - loss: 0.2627 - mae: 0.3988 - val_loss: 0.0199 - val_mae: 0.1094
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3419 - mae: 0.4816
13/13 [==============================] - 0s 2ms/step - loss: 0.2306 - mae: 0.3714

13/13 [==============================] - 0s 16ms/step - loss: 0.2306 - mae: 0.3714 - val_loss: 0.0193 - val_mae: 0.1084
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3345 - mae: 0.4194
13/13 [==============================] - 0s 2ms/step - loss: 0.2489 - mae: 0.3955

13/13 [==============================] - 0s 17ms/step - loss: 0.2489 - mae: 0.3955 - val_loss: 0.0189 - val_mae: 0.1077
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2214 - mae: 0.3859
13/13 [==============================] - 0s 2ms/step - loss: 0.2395 - mae: 0.3875

13/13 [==============================] - 0s 19ms/step - loss: 0.2395 - mae: 0.3875 - val_loss: 0.0187 - val_mae: 0.1067

Run completed: runs/2022-12-07T02-45-43Z

Training run 36/52 (flags = list(32, 32, 0.001, 50, 30, "relu", "sigmoid", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-50-56Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 2s - loss: 0.2354 - mae: 0.4066
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0012s vs `on_train_batch_end` time: 0.0043s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.2156 - mae: 0.3683

8/8 [==============================] - 1s 112ms/step - loss: 0.2156 - mae: 0.3683 - val_loss: 0.0755 - val_mae: 0.2200
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1777 - mae: 0.3393
8/8 [==============================] - 0s 2ms/step - loss: 0.1930 - mae: 0.3494

8/8 [==============================] - 0s 25ms/step - loss: 0.1930 - mae: 0.3494 - val_loss: 0.0742 - val_mae: 0.2183
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2155 - mae: 0.3500
8/8 [==============================] - 0s 2ms/step - loss: 0.2210 - mae: 0.3726

8/8 [==============================] - 0s 27ms/step - loss: 0.2210 - mae: 0.3726 - val_loss: 0.0731 - val_mae: 0.2166
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2194 - mae: 0.3546
8/8 [==============================] - 0s 3ms/step - loss: 0.2067 - mae: 0.3613

8/8 [==============================] - 0s 30ms/step - loss: 0.2067 - mae: 0.3613 - val_loss: 0.0723 - val_mae: 0.2152
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2728 - mae: 0.3918
8/8 [==============================] - 0s 2ms/step - loss: 0.2015 - mae: 0.3444

8/8 [==============================] - 0s 29ms/step - loss: 0.2015 - mae: 0.3444 - val_loss: 0.0709 - val_mae: 0.2133
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3013 - mae: 0.4640
8/8 [==============================] - 0s 3ms/step - loss: 0.2198 - mae: 0.3737

8/8 [==============================] - 0s 26ms/step - loss: 0.2198 - mae: 0.3737 - val_loss: 0.0697 - val_mae: 0.2114
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1455 - mae: 0.3144
8/8 [==============================] - 0s 3ms/step - loss: 0.1726 - mae: 0.3345

8/8 [==============================] - 0s 26ms/step - loss: 0.1726 - mae: 0.3345 - val_loss: 0.0688 - val_mae: 0.2101
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1701 - mae: 0.3240
8/8 [==============================] - 0s 3ms/step - loss: 0.2111 - mae: 0.3626

8/8 [==============================] - 0s 26ms/step - loss: 0.2111 - mae: 0.3626 - val_loss: 0.0679 - val_mae: 0.2086
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2667 - mae: 0.4269
8/8 [==============================] - 0s 3ms/step - loss: 0.2205 - mae: 0.3717

8/8 [==============================] - 0s 27ms/step - loss: 0.2205 - mae: 0.3717 - val_loss: 0.0672 - val_mae: 0.2072
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1596 - mae: 0.3244
8/8 [==============================] - 0s 3ms/step - loss: 0.2128 - mae: 0.3658

8/8 [==============================] - 0s 27ms/step - loss: 0.2128 - mae: 0.3658 - val_loss: 0.0662 - val_mae: 0.2058
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1321 - mae: 0.2979
8/8 [==============================] - 0s 2ms/step - loss: 0.1816 - mae: 0.3467

8/8 [==============================] - 0s 26ms/step - loss: 0.1816 - mae: 0.3467 - val_loss: 0.0655 - val_mae: 0.2046
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2842 - mae: 0.4359
8/8 [==============================] - 0s 3ms/step - loss: 0.2101 - mae: 0.3696

8/8 [==============================] - 0s 31ms/step - loss: 0.2101 - mae: 0.3696 - val_loss: 0.0644 - val_mae: 0.2031
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2054 - mae: 0.3436
8/8 [==============================] - 0s 3ms/step - loss: 0.1878 - mae: 0.3397

8/8 [==============================] - 0s 29ms/step - loss: 0.1878 - mae: 0.3397 - val_loss: 0.0635 - val_mae: 0.2016
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1832 - mae: 0.3295
8/8 [==============================] - 0s 2ms/step - loss: 0.2175 - mae: 0.3688

8/8 [==============================] - 0s 27ms/step - loss: 0.2175 - mae: 0.3688 - val_loss: 0.0626 - val_mae: 0.2000
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2288 - mae: 0.4022
8/8 [==============================] - 0s 3ms/step - loss: 0.1803 - mae: 0.3318

8/8 [==============================] - 0s 26ms/step - loss: 0.1803 - mae: 0.3318 - val_loss: 0.0618 - val_mae: 0.1985
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1666 - mae: 0.3234
8/8 [==============================] - 0s 2ms/step - loss: 0.1814 - mae: 0.3377

8/8 [==============================] - 0s 27ms/step - loss: 0.1814 - mae: 0.3377 - val_loss: 0.0611 - val_mae: 0.1971
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2336 - mae: 0.3930
8/8 [==============================] - 0s 2ms/step - loss: 0.2156 - mae: 0.3713

8/8 [==============================] - 0s 26ms/step - loss: 0.2156 - mae: 0.3713 - val_loss: 0.0602 - val_mae: 0.1956
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2489 - mae: 0.3986
8/8 [==============================] - 0s 3ms/step - loss: 0.2146 - mae: 0.3750

8/8 [==============================] - 0s 26ms/step - loss: 0.2146 - mae: 0.3750 - val_loss: 0.0595 - val_mae: 0.1946
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2082 - mae: 0.3836
8/8 [==============================] - 0s 3ms/step - loss: 0.2126 - mae: 0.3662

8/8 [==============================] - 0s 27ms/step - loss: 0.2126 - mae: 0.3662 - val_loss: 0.0586 - val_mae: 0.1933
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1318 - mae: 0.2903
8/8 [==============================] - 0s 3ms/step - loss: 0.1768 - mae: 0.3346

8/8 [==============================] - 0s 27ms/step - loss: 0.1768 - mae: 0.3346 - val_loss: 0.0580 - val_mae: 0.1921
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2096 - mae: 0.3765
8/8 [==============================] - 0s 2ms/step - loss: 0.2013 - mae: 0.3520

8/8 [==============================] - 0s 27ms/step - loss: 0.2013 - mae: 0.3520 - val_loss: 0.0575 - val_mae: 0.1909
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1648 - mae: 0.3339
8/8 [==============================] - 0s 2ms/step - loss: 0.1898 - mae: 0.3458

8/8 [==============================] - 0s 24ms/step - loss: 0.1898 - mae: 0.3458 - val_loss: 0.0568 - val_mae: 0.1897
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1625 - mae: 0.3318
8/8 [==============================] - 0s 2ms/step - loss: 0.1753 - mae: 0.3349

8/8 [==============================] - 0s 24ms/step - loss: 0.1753 - mae: 0.3349 - val_loss: 0.0561 - val_mae: 0.1885
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1292 - mae: 0.2933
8/8 [==============================] - 0s 2ms/step - loss: 0.1658 - mae: 0.3156

8/8 [==============================] - 0s 27ms/step - loss: 0.1658 - mae: 0.3156 - val_loss: 0.0556 - val_mae: 0.1875
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1043 - mae: 0.2576
8/8 [==============================] - 0s 2ms/step - loss: 0.1784 - mae: 0.3260

8/8 [==============================] - 0s 26ms/step - loss: 0.1784 - mae: 0.3260 - val_loss: 0.0550 - val_mae: 0.1866
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2222 - mae: 0.3709
8/8 [==============================] - 0s 2ms/step - loss: 0.1994 - mae: 0.3529

8/8 [==============================] - 0s 26ms/step - loss: 0.1994 - mae: 0.3529 - val_loss: 0.0542 - val_mae: 0.1855
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1834 - mae: 0.3360
8/8 [==============================] - 0s 2ms/step - loss: 0.1684 - mae: 0.3291

8/8 [==============================] - 0s 27ms/step - loss: 0.1684 - mae: 0.3291 - val_loss: 0.0536 - val_mae: 0.1849
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1596 - mae: 0.3238
8/8 [==============================] - 0s 2ms/step - loss: 0.1727 - mae: 0.3319

8/8 [==============================] - 0s 27ms/step - loss: 0.1727 - mae: 0.3319 - val_loss: 0.0531 - val_mae: 0.1842
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1702 - mae: 0.3313
8/8 [==============================] - 0s 2ms/step - loss: 0.1852 - mae: 0.3433

8/8 [==============================] - 0s 24ms/step - loss: 0.1852 - mae: 0.3433 - val_loss: 0.0525 - val_mae: 0.1828
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2603 - mae: 0.3922
8/8 [==============================] - 0s 2ms/step - loss: 0.2000 - mae: 0.3552

8/8 [==============================] - 0s 27ms/step - loss: 0.2000 - mae: 0.3552 - val_loss: 0.0518 - val_mae: 0.1815

Run completed: runs/2022-12-07T02-50-56Z

Training run 37/52 (flags = list(16, 10, 0.001, 50, 50, "sigmoid", "tanh", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-51-12Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 2s - loss: 0.4018 - mae: 0.5112
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0016s vs `on_train_batch_end` time: 0.0020s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.3817 - mae: 0.4964

8/8 [==============================] - 1s 100ms/step - loss: 0.3817 - mae: 0.4964 - val_loss: 0.0893 - val_mae: 0.2783
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2906 - mae: 0.4502
8/8 [==============================] - 0s 2ms/step - loss: 0.3679 - mae: 0.5037

8/8 [==============================] - 0s 27ms/step - loss: 0.3679 - mae: 0.5037 - val_loss: 0.0775 - val_mae: 0.2572
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3952 - mae: 0.5205
8/8 [==============================] - 0s 3ms/step - loss: 0.3882 - mae: 0.5076

8/8 [==============================] - 0s 27ms/step - loss: 0.3882 - mae: 0.5076 - val_loss: 0.0678 - val_mae: 0.2388
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2590 - mae: 0.3983
8/8 [==============================] - 0s 3ms/step - loss: 0.3337 - mae: 0.4659

8/8 [==============================] - 0s 30ms/step - loss: 0.3337 - mae: 0.4659 - val_loss: 0.0583 - val_mae: 0.2191
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2651 - mae: 0.4051
8/8 [==============================] - 0s 2ms/step - loss: 0.3521 - mae: 0.4729

8/8 [==============================] - 0s 27ms/step - loss: 0.3521 - mae: 0.4729 - val_loss: 0.0504 - val_mae: 0.2021
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4352 - mae: 0.5308
8/8 [==============================] - 0s 2ms/step - loss: 0.3277 - mae: 0.4622

8/8 [==============================] - 0s 26ms/step - loss: 0.3277 - mae: 0.4622 - val_loss: 0.0435 - val_mae: 0.1866
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2163 - mae: 0.3598
8/8 [==============================] - 0s 2ms/step - loss: 0.3305 - mae: 0.4603

8/8 [==============================] - 0s 24ms/step - loss: 0.3305 - mae: 0.4603 - val_loss: 0.0367 - val_mae: 0.1706
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2269 - mae: 0.3543
8/8 [==============================] - 0s 3ms/step - loss: 0.2891 - mae: 0.4160

8/8 [==============================] - 0s 26ms/step - loss: 0.2891 - mae: 0.4160 - val_loss: 0.0319 - val_mae: 0.1586
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3225 - mae: 0.4820
8/8 [==============================] - 0s 3ms/step - loss: 0.3002 - mae: 0.4382

8/8 [==============================] - 0s 31ms/step - loss: 0.3002 - mae: 0.4382 - val_loss: 0.0271 - val_mae: 0.1454
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3903 - mae: 0.4987
8/8 [==============================] - 0s 2ms/step - loss: 0.3229 - mae: 0.4511

8/8 [==============================] - 0s 29ms/step - loss: 0.3229 - mae: 0.4511 - val_loss: 0.0245 - val_mae: 0.1376
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2633 - mae: 0.4221
8/8 [==============================] - 0s 2ms/step - loss: 0.3004 - mae: 0.4396

8/8 [==============================] - 0s 27ms/step - loss: 0.3004 - mae: 0.4396 - val_loss: 0.0218 - val_mae: 0.1287
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2456 - mae: 0.3760
8/8 [==============================] - 0s 2ms/step - loss: 0.3129 - mae: 0.4519

8/8 [==============================] - 0s 26ms/step - loss: 0.3129 - mae: 0.4519 - val_loss: 0.0193 - val_mae: 0.1194
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3068 - mae: 0.4358
8/8 [==============================] - 0s 2ms/step - loss: 0.2815 - mae: 0.4228

8/8 [==============================] - 0s 27ms/step - loss: 0.2815 - mae: 0.4228 - val_loss: 0.0170 - val_mae: 0.1109
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2167 - mae: 0.3747
8/8 [==============================] - 0s 2ms/step - loss: 0.2636 - mae: 0.4179

8/8 [==============================] - 0s 27ms/step - loss: 0.2636 - mae: 0.4179 - val_loss: 0.0163 - val_mae: 0.1080
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2078 - mae: 0.3760
8/8 [==============================] - 0s 2ms/step - loss: 0.2608 - mae: 0.4120

8/8 [==============================] - 0s 26ms/step - loss: 0.2608 - mae: 0.4120 - val_loss: 0.0151 - val_mae: 0.1034
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2702 - mae: 0.4310
8/8 [==============================] - 0s 2ms/step - loss: 0.2595 - mae: 0.4098

8/8 [==============================] - 0s 29ms/step - loss: 0.2595 - mae: 0.4098 - val_loss: 0.0146 - val_mae: 0.1014
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2519 - mae: 0.3895
8/8 [==============================] - 0s 2ms/step - loss: 0.2886 - mae: 0.4317

8/8 [==============================] - 0s 26ms/step - loss: 0.2886 - mae: 0.4317 - val_loss: 0.0139 - val_mae: 0.0984
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3196 - mae: 0.4733
8/8 [==============================] - 0s 2ms/step - loss: 0.2433 - mae: 0.4002

8/8 [==============================] - 0s 26ms/step - loss: 0.2433 - mae: 0.4002 - val_loss: 0.0130 - val_mae: 0.0954
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2862 - mae: 0.4541
8/8 [==============================] - 0s 2ms/step - loss: 0.2850 - mae: 0.4317

8/8 [==============================] - 0s 26ms/step - loss: 0.2850 - mae: 0.4317 - val_loss: 0.0120 - val_mae: 0.0916
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2514 - mae: 0.3801
8/8 [==============================] - 0s 2ms/step - loss: 0.2477 - mae: 0.3968

8/8 [==============================] - 0s 25ms/step - loss: 0.2477 - mae: 0.3968 - val_loss: 0.0118 - val_mae: 0.0908
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2566 - mae: 0.4019
8/8 [==============================] - 0s 3ms/step - loss: 0.2629 - mae: 0.4162

8/8 [==============================] - 0s 26ms/step - loss: 0.2629 - mae: 0.4162 - val_loss: 0.0114 - val_mae: 0.0894
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2986 - mae: 0.4645
8/8 [==============================] - 0s 2ms/step - loss: 0.2751 - mae: 0.4219

8/8 [==============================] - 0s 26ms/step - loss: 0.2751 - mae: 0.4219 - val_loss: 0.0108 - val_mae: 0.0863
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3582 - mae: 0.4696
8/8 [==============================] - 0s 2ms/step - loss: 0.2488 - mae: 0.3938

8/8 [==============================] - 0s 26ms/step - loss: 0.2488 - mae: 0.3938 - val_loss: 0.0105 - val_mae: 0.0848
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2348 - mae: 0.3846
8/8 [==============================] - 0s 2ms/step - loss: 0.2386 - mae: 0.3936

8/8 [==============================] - 0s 26ms/step - loss: 0.2386 - mae: 0.3936 - val_loss: 0.0101 - val_mae: 0.0825
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2834 - mae: 0.4122
8/8 [==============================] - 0s 2ms/step - loss: 0.2244 - mae: 0.3704

8/8 [==============================] - 0s 26ms/step - loss: 0.2244 - mae: 0.3704 - val_loss: 0.0099 - val_mae: 0.0812
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2707 - mae: 0.4097
8/8 [==============================] - 0s 2ms/step - loss: 0.2279 - mae: 0.3788

8/8 [==============================] - 0s 27ms/step - loss: 0.2279 - mae: 0.3788 - val_loss: 0.0097 - val_mae: 0.0800
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2467 - mae: 0.4002
8/8 [==============================] - 0s 2ms/step - loss: 0.2528 - mae: 0.3999

8/8 [==============================] - 0s 27ms/step - loss: 0.2528 - mae: 0.3999 - val_loss: 0.0096 - val_mae: 0.0794
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2548 - mae: 0.3766
8/8 [==============================] - 0s 2ms/step - loss: 0.2598 - mae: 0.4103

8/8 [==============================] - 0s 25ms/step - loss: 0.2598 - mae: 0.4103 - val_loss: 0.0095 - val_mae: 0.0790
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3096 - mae: 0.4085
8/8 [==============================] - 0s 2ms/step - loss: 0.2439 - mae: 0.3822

8/8 [==============================] - 0s 27ms/step - loss: 0.2439 - mae: 0.3822 - val_loss: 0.0094 - val_mae: 0.0783
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2040 - mae: 0.3618
8/8 [==============================] - 0s 2ms/step - loss: 0.1874 - mae: 0.3492

8/8 [==============================] - 0s 27ms/step - loss: 0.1874 - mae: 0.3492 - val_loss: 0.0093 - val_mae: 0.0776
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2489 - mae: 0.4010
8/8 [==============================] - 0s 2ms/step - loss: 0.2361 - mae: 0.3948

8/8 [==============================] - 0s 26ms/step - loss: 0.2361 - mae: 0.3948 - val_loss: 0.0092 - val_mae: 0.0771
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1378 - mae: 0.2826
8/8 [==============================] - 0s 3ms/step - loss: 0.1870 - mae: 0.3420

8/8 [==============================] - 0s 27ms/step - loss: 0.1870 - mae: 0.3420 - val_loss: 0.0092 - val_mae: 0.0769
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3150 - mae: 0.4331
8/8 [==============================] - 0s 2ms/step - loss: 0.2511 - mae: 0.3980

8/8 [==============================] - 0s 26ms/step - loss: 0.2511 - mae: 0.3980 - val_loss: 0.0092 - val_mae: 0.0764
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1771 - mae: 0.3545
8/8 [==============================] - 0s 3ms/step - loss: 0.2066 - mae: 0.3719

8/8 [==============================] - 0s 27ms/step - loss: 0.2066 - mae: 0.3719 - val_loss: 0.0091 - val_mae: 0.0762
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1935 - mae: 0.3497
8/8 [==============================] - 0s 2ms/step - loss: 0.2077 - mae: 0.3690

8/8 [==============================] - 0s 27ms/step - loss: 0.2077 - mae: 0.3690 - val_loss: 0.0091 - val_mae: 0.0760
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2129 - mae: 0.3714
8/8 [==============================] - 0s 2ms/step - loss: 0.2040 - mae: 0.3576

8/8 [==============================] - 0s 27ms/step - loss: 0.2040 - mae: 0.3576 - val_loss: 0.0090 - val_mae: 0.0757
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1416 - mae: 0.2952
8/8 [==============================] - 0s 2ms/step - loss: 0.2173 - mae: 0.3780

8/8 [==============================] - 0s 26ms/step - loss: 0.2173 - mae: 0.3780 - val_loss: 0.0090 - val_mae: 0.0753
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2110 - mae: 0.3809
8/8 [==============================] - 0s 2ms/step - loss: 0.2087 - mae: 0.3697

8/8 [==============================] - 0s 26ms/step - loss: 0.2087 - mae: 0.3697 - val_loss: 0.0090 - val_mae: 0.0750
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2518 - mae: 0.4015
8/8 [==============================] - 0s 2ms/step - loss: 0.2060 - mae: 0.3621

8/8 [==============================] - 0s 25ms/step - loss: 0.2060 - mae: 0.3621 - val_loss: 0.0090 - val_mae: 0.0748
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2391 - mae: 0.3995
8/8 [==============================] - 0s 2ms/step - loss: 0.1991 - mae: 0.3612

8/8 [==============================] - 0s 27ms/step - loss: 0.1991 - mae: 0.3612 - val_loss: 0.0090 - val_mae: 0.0745
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2094 - mae: 0.3678
8/8 [==============================] - 0s 2ms/step - loss: 0.1992 - mae: 0.3660

8/8 [==============================] - 0s 26ms/step - loss: 0.1992 - mae: 0.3660 - val_loss: 0.0089 - val_mae: 0.0744
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2117 - mae: 0.3424
8/8 [==============================] - 0s 2ms/step - loss: 0.1873 - mae: 0.3478

8/8 [==============================] - 0s 27ms/step - loss: 0.1873 - mae: 0.3478 - val_loss: 0.0089 - val_mae: 0.0740
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1639 - mae: 0.3318
8/8 [==============================] - 0s 3ms/step - loss: 0.1786 - mae: 0.3457

8/8 [==============================] - 0s 26ms/step - loss: 0.1786 - mae: 0.3457 - val_loss: 0.0089 - val_mae: 0.0738
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1964 - mae: 0.3548
8/8 [==============================] - 0s 2ms/step - loss: 0.1842 - mae: 0.3365

8/8 [==============================] - 0s 26ms/step - loss: 0.1842 - mae: 0.3365 - val_loss: 0.0089 - val_mae: 0.0736
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1558 - mae: 0.3062
8/8 [==============================] - 0s 3ms/step - loss: 0.2148 - mae: 0.3670

8/8 [==============================] - 0s 27ms/step - loss: 0.2148 - mae: 0.3670 - val_loss: 0.0089 - val_mae: 0.0734
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2370 - mae: 0.4055
8/8 [==============================] - 0s 2ms/step - loss: 0.2020 - mae: 0.3634

8/8 [==============================] - 0s 26ms/step - loss: 0.2020 - mae: 0.3634 - val_loss: 0.0089 - val_mae: 0.0732
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2572 - mae: 0.4274
8/8 [==============================] - 0s 2ms/step - loss: 0.1878 - mae: 0.3521

8/8 [==============================] - 0s 27ms/step - loss: 0.1878 - mae: 0.3521 - val_loss: 0.0088 - val_mae: 0.0729
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2606 - mae: 0.4150
8/8 [==============================] - 0s 2ms/step - loss: 0.1901 - mae: 0.3567

8/8 [==============================] - 0s 26ms/step - loss: 0.1901 - mae: 0.3567 - val_loss: 0.0088 - val_mae: 0.0728
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1737 - mae: 0.3243
8/8 [==============================] - 0s 3ms/step - loss: 0.1946 - mae: 0.3490

8/8 [==============================] - 0s 27ms/step - loss: 0.1946 - mae: 0.3490 - val_loss: 0.0089 - val_mae: 0.0726
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1856 - mae: 0.3689
8/8 [==============================] - 0s 3ms/step - loss: 0.1854 - mae: 0.3493

8/8 [==============================] - 0s 26ms/step - loss: 0.1854 - mae: 0.3493 - val_loss: 0.0088 - val_mae: 0.0725

Run completed: runs/2022-12-07T02-51-12Z

Training run 38/52 (flags = list(32, 32, 0.001, 30, 50, "relu", "tanh", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-51-32Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 6s - loss: 1.8713 - mae: 1.1670
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0011s vs `on_train_batch_end` time: 0.0024s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 1.4142 - mae: 0.9414

13/13 [==============================] - 1s 60ms/step - loss: 1.4142 - mae: 0.9414 - val_loss: 0.4932 - val_mae: 0.6167
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0889 - mae: 0.8679
13/13 [==============================] - 0s 2ms/step - loss: 1.2206 - mae: 0.8842

13/13 [==============================] - 0s 16ms/step - loss: 1.2206 - mae: 0.8842 - val_loss: 0.3982 - val_mae: 0.5521
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 1.2005 - mae: 0.8985
13/13 [==============================] - 0s 2ms/step - loss: 1.1532 - mae: 0.8364

13/13 [==============================] - 0s 16ms/step - loss: 1.1532 - mae: 0.8364 - val_loss: 0.3041 - val_mae: 0.4789
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 1.2721 - mae: 0.7839
13/13 [==============================] - 0s 2ms/step - loss: 1.0222 - mae: 0.7955

13/13 [==============================] - 0s 19ms/step - loss: 1.0222 - mae: 0.7955 - val_loss: 0.2483 - val_mae: 0.4296
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0185 - mae: 0.8099
13/13 [==============================] - 0s 3ms/step - loss: 1.0925 - mae: 0.8351

13/13 [==============================] - 0s 18ms/step - loss: 1.0925 - mae: 0.8351 - val_loss: 0.1995 - val_mae: 0.3815
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0346 - mae: 0.8825
13/13 [==============================] - 0s 2ms/step - loss: 0.8975 - mae: 0.7620

13/13 [==============================] - 0s 18ms/step - loss: 0.8975 - mae: 0.7620 - val_loss: 0.1678 - val_mae: 0.3450
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 1.3675 - mae: 0.8696
13/13 [==============================] - 0s 2ms/step - loss: 0.8853 - mae: 0.7359

13/13 [==============================] - 0s 16ms/step - loss: 0.8853 - mae: 0.7359 - val_loss: 0.1349 - val_mae: 0.3005
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0340 - mae: 0.8252
13/13 [==============================] - 0s 2ms/step - loss: 0.9002 - mae: 0.7627

13/13 [==============================] - 0s 16ms/step - loss: 0.9002 - mae: 0.7627 - val_loss: 0.1203 - val_mae: 0.2803
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.9112 - mae: 0.7675
13/13 [==============================] - 0s 2ms/step - loss: 0.7854 - mae: 0.6927

13/13 [==============================] - 0s 15ms/step - loss: 0.7854 - mae: 0.6927 - val_loss: 0.1104 - val_mae: 0.2659
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6518 - mae: 0.6293
13/13 [==============================] - 0s 2ms/step - loss: 0.8639 - mae: 0.7386

13/13 [==============================] - 0s 17ms/step - loss: 0.8639 - mae: 0.7386 - val_loss: 0.0951 - val_mae: 0.2417
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.9245 - mae: 0.6540
13/13 [==============================] - 0s 2ms/step - loss: 0.8151 - mae: 0.6956

13/13 [==============================] - 0s 15ms/step - loss: 0.8151 - mae: 0.6956 - val_loss: 0.0917 - val_mae: 0.2375
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6435 - mae: 0.6588
13/13 [==============================] - 0s 2ms/step - loss: 0.6909 - mae: 0.6648

13/13 [==============================] - 0s 17ms/step - loss: 0.6909 - mae: 0.6648 - val_loss: 0.0860 - val_mae: 0.2294
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6639 - mae: 0.6515
13/13 [==============================] - 0s 2ms/step - loss: 0.7799 - mae: 0.6898

13/13 [==============================] - 0s 16ms/step - loss: 0.7799 - mae: 0.6898 - val_loss: 0.0771 - val_mae: 0.2146
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4727 - mae: 0.5575
13/13 [==============================] - 0s 4ms/step - loss: 0.6768 - mae: 0.6309

13/13 [==============================] - 0s 19ms/step - loss: 0.6768 - mae: 0.6309 - val_loss: 0.0701 - val_mae: 0.2035
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.8972 - mae: 0.7471
13/13 [==============================] - 0s 3ms/step - loss: 0.6595 - mae: 0.6328

13/13 [==============================] - 0s 17ms/step - loss: 0.6595 - mae: 0.6328 - val_loss: 0.0662 - val_mae: 0.1985
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6256 - mae: 0.6529
13/13 [==============================] - 0s 2ms/step - loss: 0.5725 - mae: 0.6013

13/13 [==============================] - 0s 16ms/step - loss: 0.5725 - mae: 0.6013 - val_loss: 0.0667 - val_mae: 0.1982
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5460 - mae: 0.5722
13/13 [==============================] - 0s 2ms/step - loss: 0.6181 - mae: 0.6204

13/13 [==============================] - 0s 15ms/step - loss: 0.6181 - mae: 0.6204 - val_loss: 0.0639 - val_mae: 0.1942
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5119 - mae: 0.5936
13/13 [==============================] - 0s 2ms/step - loss: 0.5655 - mae: 0.5877

13/13 [==============================] - 0s 15ms/step - loss: 0.5655 - mae: 0.5877 - val_loss: 0.0597 - val_mae: 0.1874
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 1.0830 - mae: 0.7542
13/13 [==============================] - 0s 2ms/step - loss: 0.6346 - mae: 0.6299

13/13 [==============================] - 0s 17ms/step - loss: 0.6346 - mae: 0.6299 - val_loss: 0.0567 - val_mae: 0.1832
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6916 - mae: 0.6630
13/13 [==============================] - 0s 2ms/step - loss: 0.5483 - mae: 0.5883

13/13 [==============================] - 0s 15ms/step - loss: 0.5483 - mae: 0.5883 - val_loss: 0.0557 - val_mae: 0.1819
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4184 - mae: 0.5337
13/13 [==============================] - 0s 2ms/step - loss: 0.5581 - mae: 0.5843

13/13 [==============================] - 0s 17ms/step - loss: 0.5581 - mae: 0.5843 - val_loss: 0.0555 - val_mae: 0.1830
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6671 - mae: 0.6540
13/13 [==============================] - 0s 2ms/step - loss: 0.4854 - mae: 0.5452

13/13 [==============================] - 0s 15ms/step - loss: 0.4854 - mae: 0.5452 - val_loss: 0.0521 - val_mae: 0.1764
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4507 - mae: 0.5666
13/13 [==============================] - 0s 2ms/step - loss: 0.5596 - mae: 0.5923

13/13 [==============================] - 0s 16ms/step - loss: 0.5596 - mae: 0.5923 - val_loss: 0.0500 - val_mae: 0.1725
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7448 - mae: 0.6858
13/13 [==============================] - 0s 2ms/step - loss: 0.5382 - mae: 0.5649

13/13 [==============================] - 0s 15ms/step - loss: 0.5382 - mae: 0.5649 - val_loss: 0.0484 - val_mae: 0.1695
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1993 - mae: 0.3494
13/13 [==============================] - 0s 2ms/step - loss: 0.5275 - mae: 0.5636

13/13 [==============================] - 0s 16ms/step - loss: 0.5275 - mae: 0.5636 - val_loss: 0.0479 - val_mae: 0.1699
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4511 - mae: 0.5534
13/13 [==============================] - 0s 2ms/step - loss: 0.4831 - mae: 0.5440

13/13 [==============================] - 0s 17ms/step - loss: 0.4831 - mae: 0.5440 - val_loss: 0.0467 - val_mae: 0.1674
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5673 - mae: 0.6558
13/13 [==============================] - 0s 2ms/step - loss: 0.4796 - mae: 0.5563

13/13 [==============================] - 0s 16ms/step - loss: 0.4796 - mae: 0.5563 - val_loss: 0.0451 - val_mae: 0.1646
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4398 - mae: 0.5693
13/13 [==============================] - 0s 2ms/step - loss: 0.5031 - mae: 0.5388

13/13 [==============================] - 0s 17ms/step - loss: 0.5031 - mae: 0.5388 - val_loss: 0.0435 - val_mae: 0.1610
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3785 - mae: 0.4818
13/13 [==============================] - 0s 2ms/step - loss: 0.4606 - mae: 0.5337

13/13 [==============================] - 0s 16ms/step - loss: 0.4606 - mae: 0.5337 - val_loss: 0.0423 - val_mae: 0.1586
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3296 - mae: 0.4460
13/13 [==============================] - 0s 3ms/step - loss: 0.4503 - mae: 0.5131

13/13 [==============================] - 0s 17ms/step - loss: 0.4503 - mae: 0.5131 - val_loss: 0.0413 - val_mae: 0.1571
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4367 - mae: 0.4887
13/13 [==============================] - 0s 2ms/step - loss: 0.3627 - mae: 0.4709

13/13 [==============================] - 0s 17ms/step - loss: 0.3627 - mae: 0.4709 - val_loss: 0.0402 - val_mae: 0.1544
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3601 - mae: 0.4906
13/13 [==============================] - 0s 2ms/step - loss: 0.3939 - mae: 0.4888

13/13 [==============================] - 0s 17ms/step - loss: 0.3939 - mae: 0.4888 - val_loss: 0.0390 - val_mae: 0.1518
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.6518 - mae: 0.5865
13/13 [==============================] - 0s 2ms/step - loss: 0.4596 - mae: 0.5219

13/13 [==============================] - 0s 16ms/step - loss: 0.4596 - mae: 0.5219 - val_loss: 0.0373 - val_mae: 0.1480
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2969 - mae: 0.4192
13/13 [==============================] - 0s 2ms/step - loss: 0.4021 - mae: 0.4903

13/13 [==============================] - 0s 17ms/step - loss: 0.4021 - mae: 0.4903 - val_loss: 0.0371 - val_mae: 0.1491
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3612 - mae: 0.5259
13/13 [==============================] - 0s 2ms/step - loss: 0.4144 - mae: 0.5075

13/13 [==============================] - 0s 15ms/step - loss: 0.4144 - mae: 0.5075 - val_loss: 0.0364 - val_mae: 0.1479
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2596 - mae: 0.4161
13/13 [==============================] - 0s 2ms/step - loss: 0.3442 - mae: 0.4596

13/13 [==============================] - 0s 16ms/step - loss: 0.3442 - mae: 0.4596 - val_loss: 0.0358 - val_mae: 0.1469
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3086 - mae: 0.4571
13/13 [==============================] - 0s 2ms/step - loss: 0.3140 - mae: 0.4447

13/13 [==============================] - 0s 17ms/step - loss: 0.3140 - mae: 0.4447 - val_loss: 0.0350 - val_mae: 0.1458
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2832 - mae: 0.4330
13/13 [==============================] - 0s 2ms/step - loss: 0.3856 - mae: 0.4793

13/13 [==============================] - 0s 15ms/step - loss: 0.3856 - mae: 0.4793 - val_loss: 0.0343 - val_mae: 0.1445
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3206 - mae: 0.4303
13/13 [==============================] - 0s 2ms/step - loss: 0.3977 - mae: 0.4815

13/13 [==============================] - 0s 16ms/step - loss: 0.3977 - mae: 0.4815 - val_loss: 0.0344 - val_mae: 0.1462
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3536 - mae: 0.4969
13/13 [==============================] - 0s 2ms/step - loss: 0.3213 - mae: 0.4412

13/13 [==============================] - 0s 16ms/step - loss: 0.3213 - mae: 0.4412 - val_loss: 0.0338 - val_mae: 0.1450
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2753 - mae: 0.4159
13/13 [==============================] - 0s 2ms/step - loss: 0.3099 - mae: 0.4420

13/13 [==============================] - 0s 15ms/step - loss: 0.3099 - mae: 0.4420 - val_loss: 0.0332 - val_mae: 0.1437
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1973 - mae: 0.3773
13/13 [==============================] - 0s 2ms/step - loss: 0.3350 - mae: 0.4486

13/13 [==============================] - 0s 19ms/step - loss: 0.3350 - mae: 0.4486 - val_loss: 0.0324 - val_mae: 0.1416
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3753 - mae: 0.4701
13/13 [==============================] - 0s 2ms/step - loss: 0.3552 - mae: 0.4657

13/13 [==============================] - 0s 16ms/step - loss: 0.3552 - mae: 0.4657 - val_loss: 0.0315 - val_mae: 0.1399
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2848 - mae: 0.4612
13/13 [==============================] - 0s 2ms/step - loss: 0.3331 - mae: 0.4510

13/13 [==============================] - 0s 17ms/step - loss: 0.3331 - mae: 0.4510 - val_loss: 0.0305 - val_mae: 0.1374
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3246 - mae: 0.4580
13/13 [==============================] - 0s 2ms/step - loss: 0.2991 - mae: 0.4244

13/13 [==============================] - 0s 15ms/step - loss: 0.2991 - mae: 0.4244 - val_loss: 0.0293 - val_mae: 0.1338
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2593 - mae: 0.3846
13/13 [==============================] - 0s 2ms/step - loss: 0.2599 - mae: 0.4012

13/13 [==============================] - 0s 16ms/step - loss: 0.2599 - mae: 0.4012 - val_loss: 0.0288 - val_mae: 0.1334
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3094 - mae: 0.4250
13/13 [==============================] - 0s 2ms/step - loss: 0.2720 - mae: 0.3938

13/13 [==============================] - 0s 16ms/step - loss: 0.2720 - mae: 0.3938 - val_loss: 0.0282 - val_mae: 0.1302
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3225 - mae: 0.4681
13/13 [==============================] - 0s 2ms/step - loss: 0.2989 - mae: 0.4338

13/13 [==============================] - 0s 15ms/step - loss: 0.2989 - mae: 0.4338 - val_loss: 0.0282 - val_mae: 0.1315
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2776 - mae: 0.3953
13/13 [==============================] - 0s 3ms/step - loss: 0.2690 - mae: 0.4049

13/13 [==============================] - 0s 17ms/step - loss: 0.2690 - mae: 0.4049 - val_loss: 0.0275 - val_mae: 0.1297
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1493 - mae: 0.3342
13/13 [==============================] - 0s 2ms/step - loss: 0.2460 - mae: 0.3888

13/13 [==============================] - 0s 17ms/step - loss: 0.2460 - mae: 0.3888 - val_loss: 0.0271 - val_mae: 0.1282

Run completed: runs/2022-12-07T02-51-32Z

Training run 39/52 (flags = list(32, 10, 0.001, 50, 50, "sigmoid", "relu", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-51-53Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 2s - loss: 2.6934 - mae: 1.1497
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0011s vs `on_train_batch_end` time: 0.0037s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 1.8747 - mae: 1.0162

8/8 [==============================] - 1s 102ms/step - loss: 1.8747 - mae: 1.0162 - val_loss: 0.0510 - val_mae: 0.1737
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 1.8045 - mae: 0.8950
8/8 [==============================] - 0s 2ms/step - loss: 1.6286 - mae: 0.9656

8/8 [==============================] - 0s 27ms/step - loss: 1.6286 - mae: 0.9656 - val_loss: 0.0423 - val_mae: 0.1542
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 1.5594 - mae: 0.9871
8/8 [==============================] - 0s 2ms/step - loss: 1.3143 - mae: 0.8674

8/8 [==============================] - 0s 27ms/step - loss: 1.3143 - mae: 0.8674 - val_loss: 0.0384 - val_mae: 0.1490
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 2.1631 - mae: 1.0616
8/8 [==============================] - 0s 3ms/step - loss: 1.5291 - mae: 0.9397

8/8 [==============================] - 0s 31ms/step - loss: 1.5291 - mae: 0.9397 - val_loss: 0.0392 - val_mae: 0.1580
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 1.0399 - mae: 0.7909
8/8 [==============================] - 0s 2ms/step - loss: 1.3180 - mae: 0.8686

8/8 [==============================] - 0s 26ms/step - loss: 1.3180 - mae: 0.8686 - val_loss: 0.0435 - val_mae: 0.1734
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 1.3039 - mae: 0.8491
8/8 [==============================] - 0s 2ms/step - loss: 1.1396 - mae: 0.8277

8/8 [==============================] - 0s 27ms/step - loss: 1.1396 - mae: 0.8277 - val_loss: 0.0497 - val_mae: 0.1898
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8330 - mae: 0.7139
8/8 [==============================] - 0s 2ms/step - loss: 0.9550 - mae: 0.7772

8/8 [==============================] - 0s 26ms/step - loss: 0.9550 - mae: 0.7772 - val_loss: 0.0562 - val_mae: 0.2043
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 1.1450 - mae: 0.8590
8/8 [==============================] - 0s 2ms/step - loss: 0.9500 - mae: 0.7516

8/8 [==============================] - 0s 27ms/step - loss: 0.9500 - mae: 0.7516 - val_loss: 0.0581 - val_mae: 0.2106
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7179 - mae: 0.6634
8/8 [==============================] - 0s 3ms/step - loss: 1.0595 - mae: 0.7754

8/8 [==============================] - 0s 31ms/step - loss: 1.0595 - mae: 0.7754 - val_loss: 0.0708 - val_mae: 0.2371
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 1.0918 - mae: 0.8114
8/8 [==============================] - 0s 2ms/step - loss: 0.7788 - mae: 0.6950

8/8 [==============================] - 0s 31ms/step - loss: 0.7788 - mae: 0.6950 - val_loss: 0.0775 - val_mae: 0.2510
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6690 - mae: 0.6591
8/8 [==============================] - 0s 2ms/step - loss: 0.7372 - mae: 0.6803

8/8 [==============================] - 0s 27ms/step - loss: 0.7372 - mae: 0.6803 - val_loss: 0.0831 - val_mae: 0.2625
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5660 - mae: 0.6334
8/8 [==============================] - 0s 2ms/step - loss: 0.7613 - mae: 0.6783

8/8 [==============================] - 0s 25ms/step - loss: 0.7613 - mae: 0.6783 - val_loss: 0.0824 - val_mae: 0.2613
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 1.1337 - mae: 0.8435
8/8 [==============================] - 0s 2ms/step - loss: 0.8994 - mae: 0.7618

8/8 [==============================] - 0s 27ms/step - loss: 0.8994 - mae: 0.7618 - val_loss: 0.0904 - val_mae: 0.2769
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6967 - mae: 0.6739
8/8 [==============================] - 0s 3ms/step - loss: 0.7366 - mae: 0.6818

8/8 [==============================] - 0s 26ms/step - loss: 0.7366 - mae: 0.6818 - val_loss: 0.0944 - val_mae: 0.2845
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6134 - mae: 0.6227
8/8 [==============================] - 0s 2ms/step - loss: 0.7431 - mae: 0.6799

8/8 [==============================] - 0s 26ms/step - loss: 0.7431 - mae: 0.6799 - val_loss: 0.1031 - val_mae: 0.2999
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4866 - mae: 0.5531
8/8 [==============================] - 0s 2ms/step - loss: 0.6725 - mae: 0.6414

8/8 [==============================] - 0s 26ms/step - loss: 0.6725 - mae: 0.6414 - val_loss: 0.1059 - val_mae: 0.3047
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5633 - mae: 0.6194
8/8 [==============================] - 0s 2ms/step - loss: 0.5691 - mae: 0.6267

8/8 [==============================] - 0s 27ms/step - loss: 0.5691 - mae: 0.6267 - val_loss: 0.1106 - val_mae: 0.3128
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6857 - mae: 0.7005
8/8 [==============================] - 0s 2ms/step - loss: 0.6734 - mae: 0.6620

8/8 [==============================] - 0s 29ms/step - loss: 0.6734 - mae: 0.6620 - val_loss: 0.1121 - val_mae: 0.3155
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.6975 - mae: 0.7039
8/8 [==============================] - 0s 2ms/step - loss: 0.5593 - mae: 0.6083

8/8 [==============================] - 0s 25ms/step - loss: 0.5593 - mae: 0.6083 - val_loss: 0.1118 - val_mae: 0.3150
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5050 - mae: 0.5940
8/8 [==============================] - 0s 2ms/step - loss: 0.5643 - mae: 0.6019

8/8 [==============================] - 0s 27ms/step - loss: 0.5643 - mae: 0.6019 - val_loss: 0.1118 - val_mae: 0.3151
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4516 - mae: 0.5478
8/8 [==============================] - 0s 2ms/step - loss: 0.5450 - mae: 0.6124

8/8 [==============================] - 0s 27ms/step - loss: 0.5450 - mae: 0.6124 - val_loss: 0.1144 - val_mae: 0.3191
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5456 - mae: 0.5900
8/8 [==============================] - 0s 2ms/step - loss: 0.4770 - mae: 0.5622

8/8 [==============================] - 0s 27ms/step - loss: 0.4770 - mae: 0.5622 - val_loss: 0.1157 - val_mae: 0.3210
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7221 - mae: 0.7078
8/8 [==============================] - 0s 3ms/step - loss: 0.5348 - mae: 0.5784

8/8 [==============================] - 0s 27ms/step - loss: 0.5348 - mae: 0.5784 - val_loss: 0.1139 - val_mae: 0.3182
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5435 - mae: 0.5976
8/8 [==============================] - 0s 3ms/step - loss: 0.5149 - mae: 0.5733

8/8 [==============================] - 0s 26ms/step - loss: 0.5149 - mae: 0.5733 - val_loss: 0.1187 - val_mae: 0.3256
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.8046 - mae: 0.6876
8/8 [==============================] - 0s 2ms/step - loss: 0.5100 - mae: 0.5673

8/8 [==============================] - 0s 27ms/step - loss: 0.5100 - mae: 0.5673 - val_loss: 0.1229 - val_mae: 0.3318
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4042 - mae: 0.4731
8/8 [==============================] - 0s 2ms/step - loss: 0.4315 - mae: 0.5246

8/8 [==============================] - 0s 26ms/step - loss: 0.4315 - mae: 0.5246 - val_loss: 0.1271 - val_mae: 0.3379
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3742 - mae: 0.4945
8/8 [==============================] - 0s 2ms/step - loss: 0.4790 - mae: 0.5496

8/8 [==============================] - 0s 27ms/step - loss: 0.4790 - mae: 0.5496 - val_loss: 0.1270 - val_mae: 0.3377
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4205 - mae: 0.5221
8/8 [==============================] - 0s 2ms/step - loss: 0.5005 - mae: 0.5603

8/8 [==============================] - 0s 24ms/step - loss: 0.5005 - mae: 0.5603 - val_loss: 0.1242 - val_mae: 0.3336
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4239 - mae: 0.5269
8/8 [==============================] - 0s 2ms/step - loss: 0.4191 - mae: 0.5288

8/8 [==============================] - 0s 25ms/step - loss: 0.4191 - mae: 0.5288 - val_loss: 0.1237 - val_mae: 0.3327
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.7339 - mae: 0.6234
8/8 [==============================] - 0s 3ms/step - loss: 0.4815 - mae: 0.5531

8/8 [==============================] - 0s 27ms/step - loss: 0.4815 - mae: 0.5531 - val_loss: 0.1243 - val_mae: 0.3336
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4406 - mae: 0.5226
8/8 [==============================] - 0s 2ms/step - loss: 0.3978 - mae: 0.5062

8/8 [==============================] - 0s 26ms/step - loss: 0.3978 - mae: 0.5062 - val_loss: 0.1257 - val_mae: 0.3356
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3946 - mae: 0.5240
8/8 [==============================] - 0s 2ms/step - loss: 0.4624 - mae: 0.5480

8/8 [==============================] - 0s 26ms/step - loss: 0.4624 - mae: 0.5480 - val_loss: 0.1252 - val_mae: 0.3346
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3298 - mae: 0.4985
8/8 [==============================] - 0s 2ms/step - loss: 0.3753 - mae: 0.5043

8/8 [==============================] - 0s 26ms/step - loss: 0.3753 - mae: 0.5043 - val_loss: 0.1244 - val_mae: 0.3334
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4843 - mae: 0.5500
8/8 [==============================] - 0s 2ms/step - loss: 0.3867 - mae: 0.5142

8/8 [==============================] - 0s 26ms/step - loss: 0.3867 - mae: 0.5142 - val_loss: 0.1253 - val_mae: 0.3346
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4111 - mae: 0.5568
8/8 [==============================] - 0s 2ms/step - loss: 0.4045 - mae: 0.5114

8/8 [==============================] - 0s 27ms/step - loss: 0.4045 - mae: 0.5114 - val_loss: 0.1240 - val_mae: 0.3326
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.9461 - mae: 0.7226
8/8 [==============================] - 0s 3ms/step - loss: 0.4537 - mae: 0.5292

8/8 [==============================] - 0s 30ms/step - loss: 0.4537 - mae: 0.5292 - val_loss: 0.1244 - val_mae: 0.3331
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3333 - mae: 0.4965
8/8 [==============================] - 0s 2ms/step - loss: 0.3745 - mae: 0.4975

8/8 [==============================] - 0s 30ms/step - loss: 0.3745 - mae: 0.4975 - val_loss: 0.1250 - val_mae: 0.3340
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.4844 - mae: 0.4581
8/8 [==============================] - 0s 2ms/step - loss: 0.3294 - mae: 0.4419

8/8 [==============================] - 0s 27ms/step - loss: 0.3294 - mae: 0.4419 - val_loss: 0.1243 - val_mae: 0.3329
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3390 - mae: 0.4958
8/8 [==============================] - 0s 2ms/step - loss: 0.3597 - mae: 0.5070

8/8 [==============================] - 0s 25ms/step - loss: 0.3597 - mae: 0.5070 - val_loss: 0.1214 - val_mae: 0.3284
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3798 - mae: 0.4689
8/8 [==============================] - 0s 3ms/step - loss: 0.3117 - mae: 0.4688

8/8 [==============================] - 0s 27ms/step - loss: 0.3117 - mae: 0.4688 - val_loss: 0.1203 - val_mae: 0.3267
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3472 - mae: 0.4775
8/8 [==============================] - 0s 3ms/step - loss: 0.3256 - mae: 0.4598

8/8 [==============================] - 0s 26ms/step - loss: 0.3256 - mae: 0.4598 - val_loss: 0.1173 - val_mae: 0.3220
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3248 - mae: 0.4639
8/8 [==============================] - 0s 2ms/step - loss: 0.3190 - mae: 0.4600

8/8 [==============================] - 0s 27ms/step - loss: 0.3190 - mae: 0.4600 - val_loss: 0.1161 - val_mae: 0.3199
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2964 - mae: 0.4238
8/8 [==============================] - 0s 2ms/step - loss: 0.3293 - mae: 0.4825

8/8 [==============================] - 0s 25ms/step - loss: 0.3293 - mae: 0.4825 - val_loss: 0.1156 - val_mae: 0.3193
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3573 - mae: 0.4840
8/8 [==============================] - 0s 2ms/step - loss: 0.3032 - mae: 0.4537

8/8 [==============================] - 0s 27ms/step - loss: 0.3032 - mae: 0.4537 - val_loss: 0.1144 - val_mae: 0.3172
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3188 - mae: 0.4402
8/8 [==============================] - 0s 3ms/step - loss: 0.3187 - mae: 0.4427

8/8 [==============================] - 0s 29ms/step - loss: 0.3187 - mae: 0.4427 - val_loss: 0.1132 - val_mae: 0.3153
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2621 - mae: 0.4244
8/8 [==============================] - 0s 3ms/step - loss: 0.3040 - mae: 0.4479

8/8 [==============================] - 0s 33ms/step - loss: 0.3040 - mae: 0.4479 - val_loss: 0.1132 - val_mae: 0.3153
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2580 - mae: 0.3932
8/8 [==============================] - 0s 2ms/step - loss: 0.3194 - mae: 0.4634

8/8 [==============================] - 0s 27ms/step - loss: 0.3194 - mae: 0.4634 - val_loss: 0.1134 - val_mae: 0.3156
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2515 - mae: 0.4058
8/8 [==============================] - 0s 2ms/step - loss: 0.2596 - mae: 0.4331

8/8 [==============================] - 0s 27ms/step - loss: 0.2596 - mae: 0.4331 - val_loss: 0.1113 - val_mae: 0.3122
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3281 - mae: 0.4612
8/8 [==============================] - 0s 2ms/step - loss: 0.2647 - mae: 0.4180

8/8 [==============================] - 0s 27ms/step - loss: 0.2647 - mae: 0.4180 - val_loss: 0.1080 - val_mae: 0.3069
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2798 - mae: 0.4569
8/8 [==============================] - 0s 2ms/step - loss: 0.2402 - mae: 0.4058

8/8 [==============================] - 0s 27ms/step - loss: 0.2402 - mae: 0.4058 - val_loss: 0.1055 - val_mae: 0.3028

Run completed: runs/2022-12-07T02-51-53Z

Training run 40/52 (flags = list(32, 10, 0.001, 50, 30, "sigmoid", "tanh", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-52-14Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 2s - loss: 3.6745 - mae: 1.6732
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0018s vs `on_train_batch_end` time: 0.0034s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 2.9298 - mae: 1.3497

8/8 [==============================] - 1s 103ms/step - loss: 2.9298 - mae: 1.3497 - val_loss: 0.8345 - val_mae: 0.7173
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 2.9937 - mae: 1.4471
8/8 [==============================] - 0s 2ms/step - loss: 2.6959 - mae: 1.3124

8/8 [==============================] - 0s 26ms/step - loss: 2.6959 - mae: 1.3124 - val_loss: 0.7217 - val_mae: 0.6820
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 2.3771 - mae: 1.2523
8/8 [==============================] - 0s 2ms/step - loss: 2.3895 - mae: 1.2387

8/8 [==============================] - 0s 28ms/step - loss: 2.3895 - mae: 1.2387 - val_loss: 0.6455 - val_mae: 0.6546
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 2.2047 - mae: 1.1163
8/8 [==============================] - 0s 2ms/step - loss: 2.3419 - mae: 1.2083

8/8 [==============================] - 0s 29ms/step - loss: 2.3419 - mae: 1.2083 - val_loss: 0.5835 - val_mae: 0.6268
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 3.0060 - mae: 1.3933
8/8 [==============================] - 0s 2ms/step - loss: 2.4994 - mae: 1.2837

8/8 [==============================] - 0s 27ms/step - loss: 2.4994 - mae: 1.2837 - val_loss: 0.5236 - val_mae: 0.5990
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 2.5545 - mae: 1.2672
8/8 [==============================] - 0s 2ms/step - loss: 2.4599 - mae: 1.2373

8/8 [==============================] - 0s 27ms/step - loss: 2.4599 - mae: 1.2373 - val_loss: 0.4752 - val_mae: 0.5783
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 2.3298 - mae: 1.1619
8/8 [==============================] - 0s 3ms/step - loss: 2.2637 - mae: 1.1732

8/8 [==============================] - 0s 27ms/step - loss: 2.2637 - mae: 1.1732 - val_loss: 0.4363 - val_mae: 0.5601
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 1.9917 - mae: 1.1441
8/8 [==============================] - 0s 3ms/step - loss: 2.1630 - mae: 1.1739

8/8 [==============================] - 0s 34ms/step - loss: 2.1630 - mae: 1.1739 - val_loss: 0.4072 - val_mae: 0.5426
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 1.5303 - mae: 1.0156
8/8 [==============================] - 0s 3ms/step - loss: 2.1080 - mae: 1.1357

8/8 [==============================] - 0s 30ms/step - loss: 2.1080 - mae: 1.1357 - val_loss: 0.3836 - val_mae: 0.5292
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 1.5766 - mae: 0.9899
8/8 [==============================] - 0s 2ms/step - loss: 1.8099 - mae: 1.0752

8/8 [==============================] - 0s 27ms/step - loss: 1.8099 - mae: 1.0752 - val_loss: 0.3649 - val_mae: 0.5153
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 1.6822 - mae: 1.0672
8/8 [==============================] - 0s 2ms/step - loss: 1.7511 - mae: 1.0590

8/8 [==============================] - 0s 27ms/step - loss: 1.7511 - mae: 1.0590 - val_loss: 0.3461 - val_mae: 0.5004
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 2.1493 - mae: 1.1170
8/8 [==============================] - 0s 2ms/step - loss: 1.7361 - mae: 1.0511

8/8 [==============================] - 0s 26ms/step - loss: 1.7361 - mae: 1.0511 - val_loss: 0.3269 - val_mae: 0.4882
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 1.9162 - mae: 1.1201
8/8 [==============================] - 0s 3ms/step - loss: 1.8004 - mae: 1.0484

8/8 [==============================] - 0s 27ms/step - loss: 1.8004 - mae: 1.0484 - val_loss: 0.3040 - val_mae: 0.4737
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 2.1983 - mae: 1.2553
8/8 [==============================] - 0s 2ms/step - loss: 1.6973 - mae: 1.0182

8/8 [==============================] - 0s 26ms/step - loss: 1.6973 - mae: 1.0182 - val_loss: 0.2837 - val_mae: 0.4590
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 1.7856 - mae: 1.0187
8/8 [==============================] - 0s 2ms/step - loss: 1.4317 - mae: 0.9313

8/8 [==============================] - 0s 27ms/step - loss: 1.4317 - mae: 0.9313 - val_loss: 0.2717 - val_mae: 0.4489
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 1.6964 - mae: 1.0270
8/8 [==============================] - 0s 2ms/step - loss: 1.5229 - mae: 0.9748

8/8 [==============================] - 0s 29ms/step - loss: 1.5229 - mae: 0.9748 - val_loss: 0.2557 - val_mae: 0.4368
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 2.1384 - mae: 1.1901
8/8 [==============================] - 0s 3ms/step - loss: 1.7236 - mae: 1.0509

8/8 [==============================] - 0s 31ms/step - loss: 1.7236 - mae: 1.0509 - val_loss: 0.2427 - val_mae: 0.4250
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 1.2525 - mae: 0.8940
8/8 [==============================] - 0s 2ms/step - loss: 1.4842 - mae: 0.9521

8/8 [==============================] - 0s 27ms/step - loss: 1.4842 - mae: 0.9521 - val_loss: 0.2294 - val_mae: 0.4132
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 1.6215 - mae: 1.0325
8/8 [==============================] - 0s 3ms/step - loss: 1.5527 - mae: 0.9780

8/8 [==============================] - 0s 27ms/step - loss: 1.5527 - mae: 0.9780 - val_loss: 0.2184 - val_mae: 0.4031
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 1.0388 - mae: 0.8411
8/8 [==============================] - 0s 2ms/step - loss: 1.2257 - mae: 0.8810

8/8 [==============================] - 0s 26ms/step - loss: 1.2257 - mae: 0.8810 - val_loss: 0.2102 - val_mae: 0.3955
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.8186 - mae: 0.6849
8/8 [==============================] - 0s 2ms/step - loss: 1.2734 - mae: 0.8839

8/8 [==============================] - 0s 27ms/step - loss: 1.2734 - mae: 0.8839 - val_loss: 0.2016 - val_mae: 0.3879
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 1.2784 - mae: 0.9150
8/8 [==============================] - 0s 2ms/step - loss: 1.1358 - mae: 0.8604

8/8 [==============================] - 0s 26ms/step - loss: 1.1358 - mae: 0.8604 - val_loss: 0.1931 - val_mae: 0.3793
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 1.7298 - mae: 0.9457
8/8 [==============================] - 0s 2ms/step - loss: 1.2285 - mae: 0.8684

8/8 [==============================] - 0s 32ms/step - loss: 1.2285 - mae: 0.8684 - val_loss: 0.1851 - val_mae: 0.3692
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 1.4434 - mae: 0.9364
8/8 [==============================] - 0s 3ms/step - loss: 1.3430 - mae: 0.9195

8/8 [==============================] - 0s 36ms/step - loss: 1.3430 - mae: 0.9195 - val_loss: 0.1785 - val_mae: 0.3611
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.9298 - mae: 0.7719
8/8 [==============================] - 0s 2ms/step - loss: 1.1447 - mae: 0.8406

8/8 [==============================] - 0s 29ms/step - loss: 1.1447 - mae: 0.8406 - val_loss: 0.1691 - val_mae: 0.3519
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 1.0212 - mae: 0.8478
8/8 [==============================] - 0s 2ms/step - loss: 1.1627 - mae: 0.8767

8/8 [==============================] - 0s 24ms/step - loss: 1.1627 - mae: 0.8767 - val_loss: 0.1612 - val_mae: 0.3447
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 1.0661 - mae: 0.8615
8/8 [==============================] - 0s 3ms/step - loss: 1.0526 - mae: 0.8152

8/8 [==============================] - 0s 26ms/step - loss: 1.0526 - mae: 0.8152 - val_loss: 0.1544 - val_mae: 0.3369
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.8126 - mae: 0.7627
8/8 [==============================] - 0s 2ms/step - loss: 0.9890 - mae: 0.8006

8/8 [==============================] - 0s 27ms/step - loss: 0.9890 - mae: 0.8006 - val_loss: 0.1499 - val_mae: 0.3306
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.9041 - mae: 0.7406
8/8 [==============================] - 0s 2ms/step - loss: 1.0216 - mae: 0.8101

8/8 [==============================] - 0s 26ms/step - loss: 1.0216 - mae: 0.8101 - val_loss: 0.1445 - val_mae: 0.3245
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.7218 - mae: 0.7053
8/8 [==============================] - 0s 2ms/step - loss: 1.0412 - mae: 0.8150

8/8 [==============================] - 0s 27ms/step - loss: 1.0412 - mae: 0.8150 - val_loss: 0.1388 - val_mae: 0.3181

Run completed: runs/2022-12-07T02-52-14Z

Training run 41/52 (flags = list(16, 50, 0.001, 30, 50, "sigmoid", "relu", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-52-31Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 4s - loss: 1.5261 - mae: 1.1224
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0049s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 1.1717 - mae: 0.9678

13/13 [==============================] - 1s 69ms/step - loss: 1.1717 - mae: 0.9678 - val_loss: 0.6261 - val_mae: 0.7684
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.8381 - mae: 0.8264
13/13 [==============================] - 0s 2ms/step - loss: 0.8432 - mae: 0.8081

13/13 [==============================] - 0s 17ms/step - loss: 0.8432 - mae: 0.8081 - val_loss: 0.4261 - val_mae: 0.6256
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.7760 - mae: 0.7934
13/13 [==============================] - 0s 2ms/step - loss: 0.7209 - mae: 0.7272

13/13 [==============================] - 0s 17ms/step - loss: 0.7209 - mae: 0.7272 - val_loss: 0.3056 - val_mae: 0.5224
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5276 - mae: 0.5993
13/13 [==============================] - 0s 3ms/step - loss: 0.5754 - mae: 0.6454

13/13 [==============================] - 0s 21ms/step - loss: 0.5754 - mae: 0.6454 - val_loss: 0.2128 - val_mae: 0.4254
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4536 - mae: 0.5698
13/13 [==============================] - 0s 3ms/step - loss: 0.4567 - mae: 0.5690

13/13 [==============================] - 0s 19ms/step - loss: 0.4567 - mae: 0.5690 - val_loss: 0.1546 - val_mae: 0.3511
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3077 - mae: 0.4663
13/13 [==============================] - 0s 3ms/step - loss: 0.3468 - mae: 0.4913

13/13 [==============================] - 0s 18ms/step - loss: 0.3468 - mae: 0.4913 - val_loss: 0.1112 - val_mae: 0.2870
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3791 - mae: 0.5041
13/13 [==============================] - 0s 2ms/step - loss: 0.3859 - mae: 0.5101

13/13 [==============================] - 0s 17ms/step - loss: 0.3859 - mae: 0.5101 - val_loss: 0.0808 - val_mae: 0.2370
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2967 - mae: 0.4649
13/13 [==============================] - 0s 2ms/step - loss: 0.3016 - mae: 0.4442

13/13 [==============================] - 0s 17ms/step - loss: 0.3016 - mae: 0.4442 - val_loss: 0.0621 - val_mae: 0.2012
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3391 - mae: 0.4737
13/13 [==============================] - 0s 2ms/step - loss: 0.3311 - mae: 0.4691

13/13 [==============================] - 0s 17ms/step - loss: 0.3311 - mae: 0.4691 - val_loss: 0.0475 - val_mae: 0.1695
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3254 - mae: 0.4774
13/13 [==============================] - 0s 2ms/step - loss: 0.2639 - mae: 0.4115

13/13 [==============================] - 0s 17ms/step - loss: 0.2639 - mae: 0.4115 - val_loss: 0.0414 - val_mae: 0.1547
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2501 - mae: 0.4053
13/13 [==============================] - 0s 3ms/step - loss: 0.2722 - mae: 0.4259

13/13 [==============================] - 0s 19ms/step - loss: 0.2722 - mae: 0.4259 - val_loss: 0.0363 - val_mae: 0.1407
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2346 - mae: 0.3799
13/13 [==============================] - 0s 2ms/step - loss: 0.2634 - mae: 0.4236

13/13 [==============================] - 0s 17ms/step - loss: 0.2634 - mae: 0.4236 - val_loss: 0.0310 - val_mae: 0.1281
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3371 - mae: 0.4456
13/13 [==============================] - 0s 3ms/step - loss: 0.2640 - mae: 0.4092

13/13 [==============================] - 0s 17ms/step - loss: 0.2640 - mae: 0.4092 - val_loss: 0.0283 - val_mae: 0.1223
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3058 - mae: 0.4450
13/13 [==============================] - 0s 2ms/step - loss: 0.2584 - mae: 0.4038

13/13 [==============================] - 0s 15ms/step - loss: 0.2584 - mae: 0.4038 - val_loss: 0.0259 - val_mae: 0.1184
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2823 - mae: 0.4342
13/13 [==============================] - 0s 2ms/step - loss: 0.2173 - mae: 0.3731

13/13 [==============================] - 0s 16ms/step - loss: 0.2173 - mae: 0.3731 - val_loss: 0.0264 - val_mae: 0.1180
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2254 - mae: 0.3592
13/13 [==============================] - 0s 3ms/step - loss: 0.1903 - mae: 0.3495

13/13 [==============================] - 0s 19ms/step - loss: 0.1903 - mae: 0.3495 - val_loss: 0.0262 - val_mae: 0.1171
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1445 - mae: 0.3068
13/13 [==============================] - 0s 2ms/step - loss: 0.1925 - mae: 0.3549

13/13 [==============================] - 0s 17ms/step - loss: 0.1925 - mae: 0.3549 - val_loss: 0.0244 - val_mae: 0.1136
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2969 - mae: 0.4658
13/13 [==============================] - 0s 2ms/step - loss: 0.2272 - mae: 0.3919

13/13 [==============================] - 0s 15ms/step - loss: 0.2272 - mae: 0.3919 - val_loss: 0.0228 - val_mae: 0.1104
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1631 - mae: 0.3309
13/13 [==============================] - 0s 2ms/step - loss: 0.2097 - mae: 0.3626

13/13 [==============================] - 0s 17ms/step - loss: 0.2097 - mae: 0.3626 - val_loss: 0.0212 - val_mae: 0.1073
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1907 - mae: 0.3710
13/13 [==============================] - 0s 2ms/step - loss: 0.1853 - mae: 0.3541

13/13 [==============================] - 0s 16ms/step - loss: 0.1853 - mae: 0.3541 - val_loss: 0.0202 - val_mae: 0.1054
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2040 - mae: 0.3338
13/13 [==============================] - 0s 2ms/step - loss: 0.1968 - mae: 0.3560

13/13 [==============================] - 0s 17ms/step - loss: 0.1968 - mae: 0.3560 - val_loss: 0.0194 - val_mae: 0.1035
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1548 - mae: 0.3224
13/13 [==============================] - 0s 2ms/step - loss: 0.1860 - mae: 0.3494

13/13 [==============================] - 0s 15ms/step - loss: 0.1860 - mae: 0.3494 - val_loss: 0.0190 - val_mae: 0.1022
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2195 - mae: 0.4103
13/13 [==============================] - 0s 2ms/step - loss: 0.1677 - mae: 0.3338

13/13 [==============================] - 0s 17ms/step - loss: 0.1677 - mae: 0.3338 - val_loss: 0.0185 - val_mae: 0.1010
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2634 - mae: 0.4308
13/13 [==============================] - 0s 2ms/step - loss: 0.1974 - mae: 0.3559

13/13 [==============================] - 0s 15ms/step - loss: 0.1974 - mae: 0.3559 - val_loss: 0.0183 - val_mae: 0.0998
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0719 - mae: 0.1993
13/13 [==============================] - 0s 2ms/step - loss: 0.1764 - mae: 0.3389

13/13 [==============================] - 0s 16ms/step - loss: 0.1764 - mae: 0.3389 - val_loss: 0.0180 - val_mae: 0.0987
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1818 - mae: 0.3629
13/13 [==============================] - 0s 2ms/step - loss: 0.1578 - mae: 0.3198

13/13 [==============================] - 0s 16ms/step - loss: 0.1578 - mae: 0.3198 - val_loss: 0.0176 - val_mae: 0.0974
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1398 - mae: 0.3107
13/13 [==============================] - 0s 2ms/step - loss: 0.1647 - mae: 0.3252

13/13 [==============================] - 0s 15ms/step - loss: 0.1647 - mae: 0.3252 - val_loss: 0.0169 - val_mae: 0.0963
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2209 - mae: 0.3666
13/13 [==============================] - 0s 3ms/step - loss: 0.1562 - mae: 0.3139

13/13 [==============================] - 0s 17ms/step - loss: 0.1562 - mae: 0.3139 - val_loss: 0.0167 - val_mae: 0.0955
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1547 - mae: 0.3109
13/13 [==============================] - 0s 2ms/step - loss: 0.1748 - mae: 0.3310

13/13 [==============================] - 0s 16ms/step - loss: 0.1748 - mae: 0.3310 - val_loss: 0.0164 - val_mae: 0.0945
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2217 - mae: 0.3914
13/13 [==============================] - 0s 2ms/step - loss: 0.1525 - mae: 0.3159

13/13 [==============================] - 0s 16ms/step - loss: 0.1525 - mae: 0.3159 - val_loss: 0.0162 - val_mae: 0.0939
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1568 - mae: 0.3058
13/13 [==============================] - 0s 2ms/step - loss: 0.1553 - mae: 0.3171

13/13 [==============================] - 0s 15ms/step - loss: 0.1553 - mae: 0.3171 - val_loss: 0.0157 - val_mae: 0.0927
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1068 - mae: 0.2599
13/13 [==============================] - 0s 2ms/step - loss: 0.1607 - mae: 0.3234

13/13 [==============================] - 0s 15ms/step - loss: 0.1607 - mae: 0.3234 - val_loss: 0.0153 - val_mae: 0.0922
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1246 - mae: 0.3020
13/13 [==============================] - 0s 2ms/step - loss: 0.1491 - mae: 0.3147

13/13 [==============================] - 0s 16ms/step - loss: 0.1491 - mae: 0.3147 - val_loss: 0.0150 - val_mae: 0.0922
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0900 - mae: 0.2410
13/13 [==============================] - 0s 3ms/step - loss: 0.1405 - mae: 0.2965

13/13 [==============================] - 0s 17ms/step - loss: 0.1405 - mae: 0.2965 - val_loss: 0.0147 - val_mae: 0.0916
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1389 - mae: 0.2897
13/13 [==============================] - 0s 2ms/step - loss: 0.1422 - mae: 0.3056

13/13 [==============================] - 0s 15ms/step - loss: 0.1422 - mae: 0.3056 - val_loss: 0.0144 - val_mae: 0.0915
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1701 - mae: 0.3638
13/13 [==============================] - 0s 2ms/step - loss: 0.1260 - mae: 0.2847

13/13 [==============================] - 0s 15ms/step - loss: 0.1260 - mae: 0.2847 - val_loss: 0.0142 - val_mae: 0.0911
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1570 - mae: 0.2798
13/13 [==============================] - 0s 2ms/step - loss: 0.1364 - mae: 0.2947

13/13 [==============================] - 0s 17ms/step - loss: 0.1364 - mae: 0.2947 - val_loss: 0.0140 - val_mae: 0.0886
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1577 - mae: 0.3248
13/13 [==============================] - 0s 2ms/step - loss: 0.1404 - mae: 0.3007

13/13 [==============================] - 0s 16ms/step - loss: 0.1404 - mae: 0.3007 - val_loss: 0.0139 - val_mae: 0.0875
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1596 - mae: 0.3334
13/13 [==============================] - 0s 2ms/step - loss: 0.1419 - mae: 0.3033

13/13 [==============================] - 0s 17ms/step - loss: 0.1419 - mae: 0.3033 - val_loss: 0.0138 - val_mae: 0.0867
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1159 - mae: 0.2728
13/13 [==============================] - 0s 2ms/step - loss: 0.1392 - mae: 0.3015

13/13 [==============================] - 0s 15ms/step - loss: 0.1392 - mae: 0.3015 - val_loss: 0.0138 - val_mae: 0.0858
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0935 - mae: 0.2242
13/13 [==============================] - 0s 2ms/step - loss: 0.1151 - mae: 0.2697

13/13 [==============================] - 0s 16ms/step - loss: 0.1151 - mae: 0.2697 - val_loss: 0.0136 - val_mae: 0.0853
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1670 - mae: 0.3399
13/13 [==============================] - 0s 2ms/step - loss: 0.1467 - mae: 0.3070

13/13 [==============================] - 0s 16ms/step - loss: 0.1467 - mae: 0.3070 - val_loss: 0.0136 - val_mae: 0.0851
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1321 - mae: 0.2985
13/13 [==============================] - 0s 2ms/step - loss: 0.1472 - mae: 0.3070

13/13 [==============================] - 0s 15ms/step - loss: 0.1472 - mae: 0.3070 - val_loss: 0.0137 - val_mae: 0.0852
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1047 - mae: 0.2659
13/13 [==============================] - 0s 3ms/step - loss: 0.1239 - mae: 0.2795

13/13 [==============================] - 0s 17ms/step - loss: 0.1239 - mae: 0.2795 - val_loss: 0.0132 - val_mae: 0.0841
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0884 - mae: 0.2444
13/13 [==============================] - 0s 2ms/step - loss: 0.1103 - mae: 0.2700

13/13 [==============================] - 0s 17ms/step - loss: 0.1103 - mae: 0.2700 - val_loss: 0.0130 - val_mae: 0.0837
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0593 - mae: 0.1836
13/13 [==============================] - 0s 2ms/step - loss: 0.1276 - mae: 0.2872

13/13 [==============================] - 0s 16ms/step - loss: 0.1276 - mae: 0.2872 - val_loss: 0.0129 - val_mae: 0.0834
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1509 - mae: 0.3050
13/13 [==============================] - 0s 2ms/step - loss: 0.1291 - mae: 0.2867

13/13 [==============================] - 0s 17ms/step - loss: 0.1291 - mae: 0.2867 - val_loss: 0.0127 - val_mae: 0.0830
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1518 - mae: 0.3378
13/13 [==============================] - 0s 2ms/step - loss: 0.1217 - mae: 0.2846

13/13 [==============================] - 0s 15ms/step - loss: 0.1217 - mae: 0.2846 - val_loss: 0.0126 - val_mae: 0.0822
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1241 - mae: 0.2874
13/13 [==============================] - 0s 2ms/step - loss: 0.1180 - mae: 0.2713

13/13 [==============================] - 0s 16ms/step - loss: 0.1180 - mae: 0.2713 - val_loss: 0.0124 - val_mae: 0.0817
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1638 - mae: 0.3496
13/13 [==============================] - 0s 2ms/step - loss: 0.1239 - mae: 0.2837

13/13 [==============================] - 0s 16ms/step - loss: 0.1239 - mae: 0.2837 - val_loss: 0.0123 - val_mae: 0.0812

Run completed: runs/2022-12-07T02-52-31Z

Training run 42/52 (flags = list(32, 10, 0.01, 30, 30, "relu", "relu", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-52-54Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 4s - loss: 1.0334 - mae: 0.8689
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0013s vs `on_train_batch_end` time: 0.0027s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.8047 - mae: 0.7558

13/13 [==============================] - 1s 62ms/step - loss: 0.8047 - mae: 0.7558 - val_loss: 0.2041 - val_mae: 0.4131
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3914 - mae: 0.5359
13/13 [==============================] - 0s 2ms/step - loss: 0.3852 - mae: 0.5303

13/13 [==============================] - 0s 17ms/step - loss: 0.3852 - mae: 0.5303 - val_loss: 0.1226 - val_mae: 0.3056
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1929 - mae: 0.3597
13/13 [==============================] - 0s 3ms/step - loss: 0.2630 - mae: 0.4378

13/13 [==============================] - 0s 18ms/step - loss: 0.2630 - mae: 0.4378 - val_loss: 0.0840 - val_mae: 0.2372
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1544 - mae: 0.3379
13/13 [==============================] - 0s 3ms/step - loss: 0.1896 - mae: 0.3730

13/13 [==============================] - 0s 19ms/step - loss: 0.1896 - mae: 0.3730 - val_loss: 0.0842 - val_mae: 0.2413
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1363 - mae: 0.3383
13/13 [==============================] - 0s 2ms/step - loss: 0.1547 - mae: 0.3390

13/13 [==============================] - 0s 17ms/step - loss: 0.1547 - mae: 0.3390 - val_loss: 0.0721 - val_mae: 0.2243
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0936 - mae: 0.2740
13/13 [==============================] - 0s 2ms/step - loss: 0.1219 - mae: 0.2956

13/13 [==============================] - 0s 16ms/step - loss: 0.1219 - mae: 0.2956 - val_loss: 0.0560 - val_mae: 0.1948
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1093 - mae: 0.2989
13/13 [==============================] - 0s 2ms/step - loss: 0.1041 - mae: 0.2669

13/13 [==============================] - 0s 17ms/step - loss: 0.1041 - mae: 0.2669 - val_loss: 0.0487 - val_mae: 0.1879
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1022 - mae: 0.2623
13/13 [==============================] - 0s 3ms/step - loss: 0.0820 - mae: 0.2334

13/13 [==============================] - 0s 19ms/step - loss: 0.0820 - mae: 0.2334 - val_loss: 0.0384 - val_mae: 0.1626
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0735 - mae: 0.2163
13/13 [==============================] - 0s 3ms/step - loss: 0.0729 - mae: 0.2160

13/13 [==============================] - 0s 18ms/step - loss: 0.0729 - mae: 0.2160 - val_loss: 0.0332 - val_mae: 0.1483
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0348 - mae: 0.1614
13/13 [==============================] - 0s 2ms/step - loss: 0.0692 - mae: 0.2081

13/13 [==============================] - 0s 17ms/step - loss: 0.0692 - mae: 0.2081 - val_loss: 0.0295 - val_mae: 0.1386
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0824 - mae: 0.2361
13/13 [==============================] - 0s 2ms/step - loss: 0.0716 - mae: 0.2064

13/13 [==============================] - 0s 17ms/step - loss: 0.0716 - mae: 0.2064 - val_loss: 0.0258 - val_mae: 0.1275
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0611 - mae: 0.1966
13/13 [==============================] - 0s 2ms/step - loss: 0.0483 - mae: 0.1763

13/13 [==============================] - 0s 17ms/step - loss: 0.0483 - mae: 0.1763 - val_loss: 0.0233 - val_mae: 0.1196
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0408 - mae: 0.1780
13/13 [==============================] - 0s 3ms/step - loss: 0.0450 - mae: 0.1687

13/13 [==============================] - 0s 17ms/step - loss: 0.0450 - mae: 0.1687 - val_loss: 0.0221 - val_mae: 0.1153
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0701 - mae: 0.1990
13/13 [==============================] - 0s 3ms/step - loss: 0.0422 - mae: 0.1580

13/13 [==============================] - 0s 17ms/step - loss: 0.0422 - mae: 0.1580 - val_loss: 0.0217 - val_mae: 0.1144
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0327 - mae: 0.1516
13/13 [==============================] - 0s 2ms/step - loss: 0.0426 - mae: 0.1599

13/13 [==============================] - 0s 17ms/step - loss: 0.0426 - mae: 0.1599 - val_loss: 0.0206 - val_mae: 0.1117
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0377 - mae: 0.1621
13/13 [==============================] - 0s 3ms/step - loss: 0.0354 - mae: 0.1479

13/13 [==============================] - 0s 17ms/step - loss: 0.0354 - mae: 0.1479 - val_loss: 0.0202 - val_mae: 0.1108
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0768 - mae: 0.1735
13/13 [==============================] - 0s 2ms/step - loss: 0.0357 - mae: 0.1422

13/13 [==============================] - 0s 17ms/step - loss: 0.0357 - mae: 0.1422 - val_loss: 0.0197 - val_mae: 0.1095
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0272 - mae: 0.1369
13/13 [==============================] - 0s 2ms/step - loss: 0.0331 - mae: 0.1418

13/13 [==============================] - 0s 17ms/step - loss: 0.0331 - mae: 0.1418 - val_loss: 0.0190 - val_mae: 0.1076
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0222 - mae: 0.1203
13/13 [==============================] - 0s 2ms/step - loss: 0.0338 - mae: 0.1385

13/13 [==============================] - 0s 17ms/step - loss: 0.0338 - mae: 0.1385 - val_loss: 0.0192 - val_mae: 0.1077
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0455 - mae: 0.1520
13/13 [==============================] - 0s 2ms/step - loss: 0.0287 - mae: 0.1320

13/13 [==============================] - 0s 15ms/step - loss: 0.0287 - mae: 0.1320 - val_loss: 0.0189 - val_mae: 0.1071
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0278 - mae: 0.1378
13/13 [==============================] - 0s 2ms/step - loss: 0.0316 - mae: 0.1376

13/13 [==============================] - 0s 17ms/step - loss: 0.0316 - mae: 0.1376 - val_loss: 0.0188 - val_mae: 0.1064
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0194 - mae: 0.1123
13/13 [==============================] - 0s 2ms/step - loss: 0.0273 - mae: 0.1296

13/13 [==============================] - 0s 17ms/step - loss: 0.0273 - mae: 0.1296 - val_loss: 0.0186 - val_mae: 0.1058
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0290 - mae: 0.1452
13/13 [==============================] - 0s 2ms/step - loss: 0.0297 - mae: 0.1375

13/13 [==============================] - 0s 17ms/step - loss: 0.0297 - mae: 0.1375 - val_loss: 0.0185 - val_mae: 0.1057
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0182 - mae: 0.1148
13/13 [==============================] - 0s 2ms/step - loss: 0.0277 - mae: 0.1287

13/13 [==============================] - 0s 17ms/step - loss: 0.0277 - mae: 0.1287 - val_loss: 0.0185 - val_mae: 0.1057
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0496 - mae: 0.1545
13/13 [==============================] - 0s 2ms/step - loss: 0.0246 - mae: 0.1238

13/13 [==============================] - 0s 17ms/step - loss: 0.0246 - mae: 0.1238 - val_loss: 0.0182 - val_mae: 0.1052
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0218 - mae: 0.1151
13/13 [==============================] - 0s 2ms/step - loss: 0.0240 - mae: 0.1217

13/13 [==============================] - 0s 17ms/step - loss: 0.0240 - mae: 0.1217 - val_loss: 0.0185 - val_mae: 0.1057
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0217 - mae: 0.1235
13/13 [==============================] - 0s 2ms/step - loss: 0.0238 - mae: 0.1238

13/13 [==============================] - 0s 17ms/step - loss: 0.0238 - mae: 0.1238 - val_loss: 0.0181 - val_mae: 0.1058
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0231 - mae: 0.1328
13/13 [==============================] - 0s 3ms/step - loss: 0.0246 - mae: 0.1238

13/13 [==============================] - 0s 17ms/step - loss: 0.0246 - mae: 0.1238 - val_loss: 0.0180 - val_mae: 0.1055
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0550 - mae: 0.1607
13/13 [==============================] - 0s 2ms/step - loss: 0.0286 - mae: 0.1286

13/13 [==============================] - 0s 17ms/step - loss: 0.0286 - mae: 0.1286 - val_loss: 0.0181 - val_mae: 0.1059
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0211 - mae: 0.1173
13/13 [==============================] - 0s 2ms/step - loss: 0.0236 - mae: 0.1221

13/13 [==============================] - 0s 17ms/step - loss: 0.0236 - mae: 0.1221 - val_loss: 0.0180 - val_mae: 0.1057

Run completed: runs/2022-12-07T02-52-54Z

Training run 43/52 (flags = list(16, 32, 0.01, 30, 50, "tanh", "sigmoid", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-53-12Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 4s - loss: 1.4842 - mae: 0.9185
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0017s vs `on_train_batch_end` time: 0.0038s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.8064 - mae: 0.7139

13/13 [==============================] - 1s 62ms/step - loss: 0.8064 - mae: 0.7139 - val_loss: 0.0142 - val_mae: 0.0979
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4950 - mae: 0.5741
13/13 [==============================] - 0s 2ms/step - loss: 0.5288 - mae: 0.5808

13/13 [==============================] - 0s 17ms/step - loss: 0.5288 - mae: 0.5808 - val_loss: 0.0122 - val_mae: 0.0911
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3995 - mae: 0.5005
13/13 [==============================] - 0s 3ms/step - loss: 0.4517 - mae: 0.5392

13/13 [==============================] - 0s 18ms/step - loss: 0.4517 - mae: 0.5392 - val_loss: 0.0088 - val_mae: 0.0775
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5713 - mae: 0.6031
13/13 [==============================] - 0s 3ms/step - loss: 0.4189 - mae: 0.5204

13/13 [==============================] - 0s 19ms/step - loss: 0.4189 - mae: 0.5204 - val_loss: 0.0095 - val_mae: 0.0793
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2535 - mae: 0.3794
13/13 [==============================] - 0s 2ms/step - loss: 0.3239 - mae: 0.4499

13/13 [==============================] - 0s 17ms/step - loss: 0.3239 - mae: 0.4499 - val_loss: 0.0081 - val_mae: 0.0749
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3181 - mae: 0.4549
13/13 [==============================] - 0s 2ms/step - loss: 0.2788 - mae: 0.4200

13/13 [==============================] - 0s 17ms/step - loss: 0.2788 - mae: 0.4200 - val_loss: 0.0370 - val_mae: 0.1721
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2151 - mae: 0.3613
13/13 [==============================] - 0s 2ms/step - loss: 0.2149 - mae: 0.3708

13/13 [==============================] - 0s 17ms/step - loss: 0.2149 - mae: 0.3708 - val_loss: 0.0076 - val_mae: 0.0703
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1389 - mae: 0.3138
13/13 [==============================] - 0s 2ms/step - loss: 0.1973 - mae: 0.3462

13/13 [==============================] - 0s 17ms/step - loss: 0.1973 - mae: 0.3462 - val_loss: 0.0078 - val_mae: 0.0706
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2085 - mae: 0.3768
13/13 [==============================] - 0s 3ms/step - loss: 0.1686 - mae: 0.3295

13/13 [==============================] - 0s 18ms/step - loss: 0.1686 - mae: 0.3295 - val_loss: 0.0114 - val_mae: 0.0849
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1977 - mae: 0.3434
13/13 [==============================] - 0s 3ms/step - loss: 0.1498 - mae: 0.3142

13/13 [==============================] - 0s 19ms/step - loss: 0.1498 - mae: 0.3142 - val_loss: 0.0097 - val_mae: 0.0774
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1141 - mae: 0.2612
13/13 [==============================] - 0s 2ms/step - loss: 0.1164 - mae: 0.2705

13/13 [==============================] - 0s 19ms/step - loss: 0.1164 - mae: 0.2705 - val_loss: 0.0090 - val_mae: 0.0734
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1005 - mae: 0.2796
13/13 [==============================] - 0s 2ms/step - loss: 0.0947 - mae: 0.2497

13/13 [==============================] - 0s 16ms/step - loss: 0.0947 - mae: 0.2497 - val_loss: 0.0250 - val_mae: 0.1414
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1157 - mae: 0.2349
13/13 [==============================] - 0s 3ms/step - loss: 0.0970 - mae: 0.2494

13/13 [==============================] - 0s 17ms/step - loss: 0.0970 - mae: 0.2494 - val_loss: 0.0162 - val_mae: 0.1090
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0784 - mae: 0.2424
13/13 [==============================] - 0s 2ms/step - loss: 0.0881 - mae: 0.2405

13/13 [==============================] - 0s 17ms/step - loss: 0.0881 - mae: 0.2405 - val_loss: 0.0088 - val_mae: 0.0712
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0689 - mae: 0.2099
13/13 [==============================] - 0s 2ms/step - loss: 0.0728 - mae: 0.2167

13/13 [==============================] - 0s 16ms/step - loss: 0.0728 - mae: 0.2167 - val_loss: 0.0123 - val_mae: 0.0924
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0568 - mae: 0.1776
13/13 [==============================] - 0s 3ms/step - loss: 0.0594 - mae: 0.1896

13/13 [==============================] - 0s 17ms/step - loss: 0.0594 - mae: 0.1896 - val_loss: 0.0089 - val_mae: 0.0722
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0641 - mae: 0.2051
13/13 [==============================] - 0s 3ms/step - loss: 0.0570 - mae: 0.1934

13/13 [==============================] - 0s 17ms/step - loss: 0.0570 - mae: 0.1934 - val_loss: 0.0105 - val_mae: 0.0820
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0477 - mae: 0.1718
13/13 [==============================] - 0s 2ms/step - loss: 0.0526 - mae: 0.1841

13/13 [==============================] - 0s 17ms/step - loss: 0.0526 - mae: 0.1841 - val_loss: 0.0103 - val_mae: 0.0805
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0544 - mae: 0.1914
13/13 [==============================] - 0s 3ms/step - loss: 0.0512 - mae: 0.1835

13/13 [==============================] - 0s 17ms/step - loss: 0.0512 - mae: 0.1835 - val_loss: 0.0097 - val_mae: 0.0736
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0772 - mae: 0.2295
13/13 [==============================] - 0s 2ms/step - loss: 0.0570 - mae: 0.1897

13/13 [==============================] - 0s 17ms/step - loss: 0.0570 - mae: 0.1897 - val_loss: 0.0120 - val_mae: 0.0877
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0801 - mae: 0.2468
13/13 [==============================] - 0s 3ms/step - loss: 0.0467 - mae: 0.1723

13/13 [==============================] - 0s 18ms/step - loss: 0.0467 - mae: 0.1723 - val_loss: 0.0100 - val_mae: 0.0744
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0595 - mae: 0.1893
13/13 [==============================] - 0s 2ms/step - loss: 0.0409 - mae: 0.1595

13/13 [==============================] - 0s 16ms/step - loss: 0.0409 - mae: 0.1595 - val_loss: 0.0109 - val_mae: 0.0788
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0329 - mae: 0.1597
13/13 [==============================] - 0s 3ms/step - loss: 0.0332 - mae: 0.1466

13/13 [==============================] - 0s 18ms/step - loss: 0.0332 - mae: 0.1466 - val_loss: 0.0109 - val_mae: 0.0783
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0574 - mae: 0.1734
13/13 [==============================] - 0s 2ms/step - loss: 0.0460 - mae: 0.1710

13/13 [==============================] - 0s 17ms/step - loss: 0.0460 - mae: 0.1710 - val_loss: 0.0102 - val_mae: 0.0749
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0279 - mae: 0.1316
13/13 [==============================] - 0s 3ms/step - loss: 0.0385 - mae: 0.1563

13/13 [==============================] - 0s 17ms/step - loss: 0.0385 - mae: 0.1563 - val_loss: 0.0185 - val_mae: 0.1192
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0456 - mae: 0.1695
13/13 [==============================] - 0s 3ms/step - loss: 0.0367 - mae: 0.1550

13/13 [==============================] - 0s 17ms/step - loss: 0.0367 - mae: 0.1550 - val_loss: 0.0097 - val_mae: 0.0757
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0285 - mae: 0.1407
13/13 [==============================] - 0s 2ms/step - loss: 0.0360 - mae: 0.1542

13/13 [==============================] - 0s 17ms/step - loss: 0.0360 - mae: 0.1542 - val_loss: 0.0094 - val_mae: 0.0727
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0463 - mae: 0.1737
13/13 [==============================] - 0s 2ms/step - loss: 0.0350 - mae: 0.1498

13/13 [==============================] - 0s 17ms/step - loss: 0.0350 - mae: 0.1498 - val_loss: 0.0098 - val_mae: 0.0755
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0319 - mae: 0.1489
13/13 [==============================] - 0s 3ms/step - loss: 0.0355 - mae: 0.1483

13/13 [==============================] - 0s 17ms/step - loss: 0.0355 - mae: 0.1483 - val_loss: 0.0115 - val_mae: 0.0851
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0472 - mae: 0.1633
13/13 [==============================] - 0s 2ms/step - loss: 0.0324 - mae: 0.1426

13/13 [==============================] - 0s 16ms/step - loss: 0.0324 - mae: 0.1426 - val_loss: 0.0098 - val_mae: 0.0726
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0428 - mae: 0.1801
13/13 [==============================] - 0s 2ms/step - loss: 0.0310 - mae: 0.1404

13/13 [==============================] - 0s 17ms/step - loss: 0.0310 - mae: 0.1404 - val_loss: 0.0107 - val_mae: 0.0769
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0325 - mae: 0.1518
13/13 [==============================] - 0s 2ms/step - loss: 0.0327 - mae: 0.1454

13/13 [==============================] - 0s 17ms/step - loss: 0.0327 - mae: 0.1454 - val_loss: 0.0112 - val_mae: 0.0837
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0315 - mae: 0.1486
13/13 [==============================] - 0s 2ms/step - loss: 0.0291 - mae: 0.1391

13/13 [==============================] - 0s 17ms/step - loss: 0.0291 - mae: 0.1391 - val_loss: 0.0101 - val_mae: 0.0777
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0244 - mae: 0.1199
13/13 [==============================] - 0s 2ms/step - loss: 0.0292 - mae: 0.1355

13/13 [==============================] - 0s 17ms/step - loss: 0.0292 - mae: 0.1355 - val_loss: 0.0095 - val_mae: 0.0725
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0355 - mae: 0.1521
13/13 [==============================] - 0s 2ms/step - loss: 0.0305 - mae: 0.1405

13/13 [==============================] - 0s 17ms/step - loss: 0.0305 - mae: 0.1405 - val_loss: 0.0098 - val_mae: 0.0757
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0197 - mae: 0.1094
13/13 [==============================] - 0s 3ms/step - loss: 0.0260 - mae: 0.1248

13/13 [==============================] - 0s 17ms/step - loss: 0.0260 - mae: 0.1248 - val_loss: 0.0095 - val_mae: 0.0720
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0237 - mae: 0.1270
13/13 [==============================] - 0s 2ms/step - loss: 0.0292 - mae: 0.1343

13/13 [==============================] - 0s 17ms/step - loss: 0.0292 - mae: 0.1343 - val_loss: 0.0097 - val_mae: 0.0744
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0265 - mae: 0.1274
13/13 [==============================] - 0s 2ms/step - loss: 0.0278 - mae: 0.1327

13/13 [==============================] - 0s 17ms/step - loss: 0.0278 - mae: 0.1327 - val_loss: 0.0098 - val_mae: 0.0742
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0223 - mae: 0.1258
13/13 [==============================] - 0s 2ms/step - loss: 0.0242 - mae: 0.1265

13/13 [==============================] - 0s 17ms/step - loss: 0.0242 - mae: 0.1265 - val_loss: 0.0102 - val_mae: 0.0777
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0241 - mae: 0.1183
13/13 [==============================] - 0s 2ms/step - loss: 0.0232 - mae: 0.1207

13/13 [==============================] - 0s 17ms/step - loss: 0.0232 - mae: 0.1207 - val_loss: 0.0119 - val_mae: 0.0868
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0300 - mae: 0.1505
13/13 [==============================] - 0s 2ms/step - loss: 0.0258 - mae: 0.1272

13/13 [==============================] - 0s 17ms/step - loss: 0.0258 - mae: 0.1272 - val_loss: 0.0112 - val_mae: 0.0824
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0375 - mae: 0.1628
13/13 [==============================] - 0s 2ms/step - loss: 0.0253 - mae: 0.1263

13/13 [==============================] - 0s 17ms/step - loss: 0.0253 - mae: 0.1263 - val_loss: 0.0105 - val_mae: 0.0782
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0250 - mae: 0.1343
13/13 [==============================] - 0s 2ms/step - loss: 0.0249 - mae: 0.1270

13/13 [==============================] - 0s 17ms/step - loss: 0.0249 - mae: 0.1270 - val_loss: 0.0106 - val_mae: 0.0758
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0292 - mae: 0.1428
13/13 [==============================] - 0s 2ms/step - loss: 0.0297 - mae: 0.1368

13/13 [==============================] - 0s 17ms/step - loss: 0.0297 - mae: 0.1368 - val_loss: 0.0102 - val_mae: 0.0759
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0243 - mae: 0.1173
13/13 [==============================] - 0s 2ms/step - loss: 0.0278 - mae: 0.1330

13/13 [==============================] - 0s 17ms/step - loss: 0.0278 - mae: 0.1330 - val_loss: 0.0103 - val_mae: 0.0773
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0338 - mae: 0.1465
13/13 [==============================] - 0s 2ms/step - loss: 0.0242 - mae: 0.1253

13/13 [==============================] - 0s 17ms/step - loss: 0.0242 - mae: 0.1253 - val_loss: 0.0102 - val_mae: 0.0751
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0177 - mae: 0.0964
13/13 [==============================] - 0s 3ms/step - loss: 0.0215 - mae: 0.1168

13/13 [==============================] - 0s 17ms/step - loss: 0.0215 - mae: 0.1168 - val_loss: 0.0100 - val_mae: 0.0747
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0226 - mae: 0.1267
13/13 [==============================] - 0s 3ms/step - loss: 0.0220 - mae: 0.1212

13/13 [==============================] - 0s 17ms/step - loss: 0.0220 - mae: 0.1212 - val_loss: 0.0113 - val_mae: 0.0837
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0160 - mae: 0.0952
13/13 [==============================] - 0s 2ms/step - loss: 0.0232 - mae: 0.1187

13/13 [==============================] - 0s 17ms/step - loss: 0.0232 - mae: 0.1187 - val_loss: 0.0111 - val_mae: 0.0822
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0167 - mae: 0.1040
13/13 [==============================] - 0s 2ms/step - loss: 0.0237 - mae: 0.1234

13/13 [==============================] - 0s 17ms/step - loss: 0.0237 - mae: 0.1234 - val_loss: 0.0123 - val_mae: 0.0884

Run completed: runs/2022-12-07T02-53-12Z

Training run 44/52 (flags = list(16, 32, 0.01, 50, 30, "sigmoid", "tanh", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-53-34Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 2s - loss: 0.7565 - mae: 0.7752
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0020s vs `on_train_batch_end` time: 0.0035s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.3825 - mae: 0.5037

8/8 [==============================] - 1s 106ms/step - loss: 0.3825 - mae: 0.5037 - val_loss: 0.0363 - val_mae: 0.1564
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1638 - mae: 0.3244
8/8 [==============================] - 0s 2ms/step - loss: 0.1840 - mae: 0.3354

8/8 [==============================] - 0s 26ms/step - loss: 0.1840 - mae: 0.3354 - val_loss: 0.0237 - val_mae: 0.1207
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1412 - mae: 0.3095
8/8 [==============================] - 0s 3ms/step - loss: 0.1537 - mae: 0.3199

8/8 [==============================] - 0s 31ms/step - loss: 0.1537 - mae: 0.3199 - val_loss: 0.0220 - val_mae: 0.1166
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2200 - mae: 0.3825
8/8 [==============================] - 0s 2ms/step - loss: 0.1451 - mae: 0.3098

8/8 [==============================] - 0s 29ms/step - loss: 0.1451 - mae: 0.3098 - val_loss: 0.0207 - val_mae: 0.1133
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1389 - mae: 0.2997
8/8 [==============================] - 0s 2ms/step - loss: 0.1444 - mae: 0.3072

8/8 [==============================] - 0s 28ms/step - loss: 0.1444 - mae: 0.3072 - val_loss: 0.0195 - val_mae: 0.1098
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1909 - mae: 0.3760
8/8 [==============================] - 0s 2ms/step - loss: 0.1501 - mae: 0.3097

8/8 [==============================] - 0s 29ms/step - loss: 0.1501 - mae: 0.3097 - val_loss: 0.0193 - val_mae: 0.1103
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1624 - mae: 0.3320
8/8 [==============================] - 0s 2ms/step - loss: 0.1343 - mae: 0.2999

8/8 [==============================] - 0s 28ms/step - loss: 0.1343 - mae: 0.2999 - val_loss: 0.0167 - val_mae: 0.1015
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1197 - mae: 0.2790
8/8 [==============================] - 0s 2ms/step - loss: 0.1185 - mae: 0.2836

8/8 [==============================] - 0s 29ms/step - loss: 0.1185 - mae: 0.2836 - val_loss: 0.0158 - val_mae: 0.0988
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1160 - mae: 0.2780
8/8 [==============================] - 0s 2ms/step - loss: 0.1223 - mae: 0.2830

8/8 [==============================] - 0s 26ms/step - loss: 0.1223 - mae: 0.2830 - val_loss: 0.0151 - val_mae: 0.0974
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1086 - mae: 0.2826
8/8 [==============================] - 0s 2ms/step - loss: 0.1079 - mae: 0.2639

8/8 [==============================] - 0s 29ms/step - loss: 0.1079 - mae: 0.2639 - val_loss: 0.0146 - val_mae: 0.0958
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1636 - mae: 0.3151
8/8 [==============================] - 0s 2ms/step - loss: 0.1180 - mae: 0.2730

8/8 [==============================] - 0s 29ms/step - loss: 0.1180 - mae: 0.2730 - val_loss: 0.0138 - val_mae: 0.0926
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1290 - mae: 0.2892
8/8 [==============================] - 0s 2ms/step - loss: 0.1091 - mae: 0.2621

8/8 [==============================] - 0s 27ms/step - loss: 0.1091 - mae: 0.2621 - val_loss: 0.0134 - val_mae: 0.0915
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0818 - mae: 0.2320
8/8 [==============================] - 0s 2ms/step - loss: 0.0935 - mae: 0.2490

8/8 [==============================] - 0s 26ms/step - loss: 0.0935 - mae: 0.2490 - val_loss: 0.0137 - val_mae: 0.0913
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0859 - mae: 0.2375
8/8 [==============================] - 0s 2ms/step - loss: 0.0914 - mae: 0.2401

8/8 [==============================] - 0s 27ms/step - loss: 0.0914 - mae: 0.2401 - val_loss: 0.0126 - val_mae: 0.0880
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1126 - mae: 0.2644
8/8 [==============================] - 0s 2ms/step - loss: 0.0980 - mae: 0.2519

8/8 [==============================] - 0s 31ms/step - loss: 0.0980 - mae: 0.2519 - val_loss: 0.0135 - val_mae: 0.0954
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0762 - mae: 0.2208
8/8 [==============================] - 0s 2ms/step - loss: 0.0846 - mae: 0.2376

8/8 [==============================] - 0s 27ms/step - loss: 0.0846 - mae: 0.2376 - val_loss: 0.0127 - val_mae: 0.0922
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0997 - mae: 0.2536
8/8 [==============================] - 0s 2ms/step - loss: 0.0838 - mae: 0.2364

8/8 [==============================] - 0s 27ms/step - loss: 0.0838 - mae: 0.2364 - val_loss: 0.0112 - val_mae: 0.0831
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0839 - mae: 0.2369
8/8 [==============================] - 0s 2ms/step - loss: 0.0829 - mae: 0.2352

8/8 [==============================] - 0s 30ms/step - loss: 0.0829 - mae: 0.2352 - val_loss: 0.0108 - val_mae: 0.0819
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0893 - mae: 0.2455
8/8 [==============================] - 0s 2ms/step - loss: 0.0732 - mae: 0.2165

8/8 [==============================] - 0s 29ms/step - loss: 0.0732 - mae: 0.2165 - val_loss: 0.0109 - val_mae: 0.0812
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0918 - mae: 0.2470
8/8 [==============================] - 0s 2ms/step - loss: 0.0759 - mae: 0.2175

8/8 [==============================] - 0s 29ms/step - loss: 0.0759 - mae: 0.2175 - val_loss: 0.0105 - val_mae: 0.0796
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0910 - mae: 0.2419
8/8 [==============================] - 0s 2ms/step - loss: 0.0734 - mae: 0.2156

8/8 [==============================] - 0s 27ms/step - loss: 0.0734 - mae: 0.2156 - val_loss: 0.0105 - val_mae: 0.0788
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0848 - mae: 0.2331
8/8 [==============================] - 0s 3ms/step - loss: 0.0758 - mae: 0.2166

8/8 [==============================] - 0s 28ms/step - loss: 0.0758 - mae: 0.2166 - val_loss: 0.0098 - val_mae: 0.0769
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0672 - mae: 0.1995
8/8 [==============================] - 0s 3ms/step - loss: 0.0627 - mae: 0.2028

8/8 [==============================] - 0s 29ms/step - loss: 0.0627 - mae: 0.2028 - val_loss: 0.0104 - val_mae: 0.0833
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0957 - mae: 0.2495
8/8 [==============================] - 0s 3ms/step - loss: 0.0677 - mae: 0.2081

8/8 [==============================] - 0s 28ms/step - loss: 0.0677 - mae: 0.2081 - val_loss: 0.0097 - val_mae: 0.0783
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0646 - mae: 0.2111
8/8 [==============================] - 0s 2ms/step - loss: 0.0624 - mae: 0.1913

8/8 [==============================] - 0s 27ms/step - loss: 0.0624 - mae: 0.1913 - val_loss: 0.0092 - val_mae: 0.0738
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0581 - mae: 0.1843
8/8 [==============================] - 0s 2ms/step - loss: 0.0582 - mae: 0.1884

8/8 [==============================] - 0s 28ms/step - loss: 0.0582 - mae: 0.1884 - val_loss: 0.0094 - val_mae: 0.0738
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0898 - mae: 0.2305
8/8 [==============================] - 0s 2ms/step - loss: 0.0602 - mae: 0.1959

8/8 [==============================] - 0s 29ms/step - loss: 0.0602 - mae: 0.1959 - val_loss: 0.0088 - val_mae: 0.0719
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0541 - mae: 0.1765
8/8 [==============================] - 0s 2ms/step - loss: 0.0614 - mae: 0.1941

8/8 [==============================] - 0s 27ms/step - loss: 0.0614 - mae: 0.1941 - val_loss: 0.0089 - val_mae: 0.0714
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0758 - mae: 0.2281
8/8 [==============================] - 0s 2ms/step - loss: 0.0634 - mae: 0.2043

8/8 [==============================] - 0s 27ms/step - loss: 0.0634 - mae: 0.2043 - val_loss: 0.0092 - val_mae: 0.0718
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0317 - mae: 0.1405
8/8 [==============================] - 0s 3ms/step - loss: 0.0532 - mae: 0.1843

8/8 [==============================] - 0s 30ms/step - loss: 0.0532 - mae: 0.1843 - val_loss: 0.0083 - val_mae: 0.0693

Run completed: runs/2022-12-07T02-53-34Z

Training run 45/52 (flags = list(16, 10, 0.01, 50, 50, "tanh", "sigmoid", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-53-53Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 2s - loss: 1.4832 - mae: 1.1289
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0040s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 1.0027 - mae: 0.8784

8/8 [==============================] - 1s 106ms/step - loss: 1.0027 - mae: 0.8784 - val_loss: 0.3794 - val_mae: 0.5891
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 0.5431 - mae: 0.6114
8/8 [==============================] - 0s 3ms/step - loss: 0.4273 - mae: 0.5368

8/8 [==============================] - 0s 27ms/step - loss: 0.4273 - mae: 0.5368 - val_loss: 0.1469 - val_mae: 0.3469
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2639 - mae: 0.4061
8/8 [==============================] - 0s 2ms/step - loss: 0.2743 - mae: 0.4165

8/8 [==============================] - 0s 26ms/step - loss: 0.2743 - mae: 0.4165 - val_loss: 0.0722 - val_mae: 0.2328
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2700 - mae: 0.3739
8/8 [==============================] - 0s 3ms/step - loss: 0.2470 - mae: 0.3978

8/8 [==============================] - 0s 31ms/step - loss: 0.2470 - mae: 0.3978 - val_loss: 0.0353 - val_mae: 0.1582
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2955 - mae: 0.4168
8/8 [==============================] - 0s 2ms/step - loss: 0.2144 - mae: 0.3705

8/8 [==============================] - 0s 29ms/step - loss: 0.2144 - mae: 0.3705 - val_loss: 0.0238 - val_mae: 0.1290
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2368 - mae: 0.3796
8/8 [==============================] - 0s 3ms/step - loss: 0.1967 - mae: 0.3526

8/8 [==============================] - 0s 30ms/step - loss: 0.1967 - mae: 0.3526 - val_loss: 0.0201 - val_mae: 0.1158
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2313 - mae: 0.3701
8/8 [==============================] - 0s 2ms/step - loss: 0.1940 - mae: 0.3501

8/8 [==============================] - 0s 28ms/step - loss: 0.1940 - mae: 0.3501 - val_loss: 0.0198 - val_mae: 0.1161
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2022 - mae: 0.3568
8/8 [==============================] - 0s 2ms/step - loss: 0.1867 - mae: 0.3406

8/8 [==============================] - 0s 27ms/step - loss: 0.1867 - mae: 0.3406 - val_loss: 0.0184 - val_mae: 0.1115
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1725 - mae: 0.3492
8/8 [==============================] - 0s 3ms/step - loss: 0.1895 - mae: 0.3494

8/8 [==============================] - 0s 29ms/step - loss: 0.1895 - mae: 0.3494 - val_loss: 0.0166 - val_mae: 0.1048
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.2184 - mae: 0.3651
8/8 [==============================] - 0s 2ms/step - loss: 0.1795 - mae: 0.3325

8/8 [==============================] - 0s 29ms/step - loss: 0.1795 - mae: 0.3325 - val_loss: 0.0164 - val_mae: 0.1047
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1785 - mae: 0.3210
8/8 [==============================] - 0s 3ms/step - loss: 0.1623 - mae: 0.3156

8/8 [==============================] - 1s 80ms/step - loss: 0.1623 - mae: 0.3156 - val_loss: 0.0144 - val_mae: 0.0969
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1551 - mae: 0.3189
8/8 [==============================] - 0s 2ms/step - loss: 0.1502 - mae: 0.3087

8/8 [==============================] - 0s 26ms/step - loss: 0.1502 - mae: 0.3087 - val_loss: 0.0145 - val_mae: 0.0974
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1619 - mae: 0.3326
8/8 [==============================] - 0s 2ms/step - loss: 0.1504 - mae: 0.3191

8/8 [==============================] - 0s 29ms/step - loss: 0.1504 - mae: 0.3191 - val_loss: 0.0134 - val_mae: 0.0936
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1965 - mae: 0.3484
8/8 [==============================] - 0s 2ms/step - loss: 0.1445 - mae: 0.3053

8/8 [==============================] - 0s 28ms/step - loss: 0.1445 - mae: 0.3053 - val_loss: 0.0127 - val_mae: 0.0909
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0779 - mae: 0.2175
8/8 [==============================] - 0s 3ms/step - loss: 0.1192 - mae: 0.2809

8/8 [==============================] - 0s 27ms/step - loss: 0.1192 - mae: 0.2809 - val_loss: 0.0121 - val_mae: 0.0885
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1527 - mae: 0.3246
8/8 [==============================] - 0s 2ms/step - loss: 0.1263 - mae: 0.2917

8/8 [==============================] - 0s 27ms/step - loss: 0.1263 - mae: 0.2917 - val_loss: 0.0121 - val_mae: 0.0884
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1325 - mae: 0.3086
8/8 [==============================] - 0s 2ms/step - loss: 0.1201 - mae: 0.2808

8/8 [==============================] - 0s 29ms/step - loss: 0.1201 - mae: 0.2808 - val_loss: 0.0112 - val_mae: 0.0848
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1225 - mae: 0.2890
8/8 [==============================] - 0s 2ms/step - loss: 0.1289 - mae: 0.2901

8/8 [==============================] - 0s 27ms/step - loss: 0.1289 - mae: 0.2901 - val_loss: 0.0109 - val_mae: 0.0834
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1151 - mae: 0.2751
8/8 [==============================] - 0s 2ms/step - loss: 0.1037 - mae: 0.2595

8/8 [==============================] - 0s 31ms/step - loss: 0.1037 - mae: 0.2595 - val_loss: 0.0114 - val_mae: 0.0859
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1241 - mae: 0.2928
8/8 [==============================] - 0s 2ms/step - loss: 0.1305 - mae: 0.2896

8/8 [==============================] - 0s 26ms/step - loss: 0.1305 - mae: 0.2896 - val_loss: 0.0116 - val_mae: 0.0876
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1004 - mae: 0.2597
8/8 [==============================] - 0s 2ms/step - loss: 0.1173 - mae: 0.2741

8/8 [==============================] - 0s 27ms/step - loss: 0.1173 - mae: 0.2741 - val_loss: 0.0111 - val_mae: 0.0851
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1136 - mae: 0.2630
8/8 [==============================] - 0s 3ms/step - loss: 0.1155 - mae: 0.2734

8/8 [==============================] - 0s 29ms/step - loss: 0.1155 - mae: 0.2734 - val_loss: 0.0104 - val_mae: 0.0814
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0748 - mae: 0.2131
8/8 [==============================] - 0s 2ms/step - loss: 0.0950 - mae: 0.2485

8/8 [==============================] - 0s 27ms/step - loss: 0.0950 - mae: 0.2485 - val_loss: 0.0109 - val_mae: 0.0847
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1387 - mae: 0.3116
8/8 [==============================] - 0s 3ms/step - loss: 0.1085 - mae: 0.2644

8/8 [==============================] - 0s 28ms/step - loss: 0.1085 - mae: 0.2644 - val_loss: 0.0097 - val_mae: 0.0785
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0952 - mae: 0.2595
8/8 [==============================] - 0s 3ms/step - loss: 0.1095 - mae: 0.2665

8/8 [==============================] - 0s 31ms/step - loss: 0.1095 - mae: 0.2665 - val_loss: 0.0091 - val_mae: 0.0760
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0934 - mae: 0.2440
8/8 [==============================] - 0s 2ms/step - loss: 0.0991 - mae: 0.2529

8/8 [==============================] - 0s 27ms/step - loss: 0.0991 - mae: 0.2529 - val_loss: 0.0090 - val_mae: 0.0754
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0954 - mae: 0.2576
8/8 [==============================] - 0s 3ms/step - loss: 0.0984 - mae: 0.2455

8/8 [==============================] - 0s 28ms/step - loss: 0.0984 - mae: 0.2455 - val_loss: 0.0098 - val_mae: 0.0798
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0660 - mae: 0.2161
8/8 [==============================] - 0s 3ms/step - loss: 0.0924 - mae: 0.2401

8/8 [==============================] - 0s 29ms/step - loss: 0.0924 - mae: 0.2401 - val_loss: 0.0086 - val_mae: 0.0738
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0783 - mae: 0.2194
8/8 [==============================] - 0s 2ms/step - loss: 0.0986 - mae: 0.2512

8/8 [==============================] - 0s 29ms/step - loss: 0.0986 - mae: 0.2512 - val_loss: 0.0085 - val_mae: 0.0733
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0844 - mae: 0.2255
8/8 [==============================] - 0s 3ms/step - loss: 0.0895 - mae: 0.2381

8/8 [==============================] - 0s 31ms/step - loss: 0.0895 - mae: 0.2381 - val_loss: 0.0087 - val_mae: 0.0740
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0780 - mae: 0.2311
8/8 [==============================] - 0s 2ms/step - loss: 0.0823 - mae: 0.2327

8/8 [==============================] - 0s 29ms/step - loss: 0.0823 - mae: 0.2327 - val_loss: 0.0082 - val_mae: 0.0719
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1219 - mae: 0.2844
8/8 [==============================] - 0s 3ms/step - loss: 0.0889 - mae: 0.2372

8/8 [==============================] - 0s 31ms/step - loss: 0.0889 - mae: 0.2372 - val_loss: 0.0078 - val_mae: 0.0701
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0569 - mae: 0.1917
8/8 [==============================] - 0s 2ms/step - loss: 0.0707 - mae: 0.2072

8/8 [==============================] - 0s 29ms/step - loss: 0.0707 - mae: 0.2072 - val_loss: 0.0079 - val_mae: 0.0710
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0812 - mae: 0.2205
8/8 [==============================] - 0s 2ms/step - loss: 0.0857 - mae: 0.2378

8/8 [==============================] - 0s 26ms/step - loss: 0.0857 - mae: 0.2378 - val_loss: 0.0075 - val_mae: 0.0684
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0799 - mae: 0.2291
8/8 [==============================] - 0s 3ms/step - loss: 0.0853 - mae: 0.2329

8/8 [==============================] - 0s 26ms/step - loss: 0.0853 - mae: 0.2329 - val_loss: 0.0076 - val_mae: 0.0692
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0686 - mae: 0.2121
8/8 [==============================] - 0s 2ms/step - loss: 0.0705 - mae: 0.2130

8/8 [==============================] - 0s 31ms/step - loss: 0.0705 - mae: 0.2130 - val_loss: 0.0075 - val_mae: 0.0688
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0654 - mae: 0.2041
8/8 [==============================] - 0s 2ms/step - loss: 0.0775 - mae: 0.2256

8/8 [==============================] - 0s 26ms/step - loss: 0.0775 - mae: 0.2256 - val_loss: 0.0074 - val_mae: 0.0683
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1023 - mae: 0.2550
8/8 [==============================] - 0s 3ms/step - loss: 0.0759 - mae: 0.2207

8/8 [==============================] - 0s 26ms/step - loss: 0.0759 - mae: 0.2207 - val_loss: 0.0073 - val_mae: 0.0677
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0733 - mae: 0.1999
8/8 [==============================] - 0s 2ms/step - loss: 0.0697 - mae: 0.2090

8/8 [==============================] - 0s 28ms/step - loss: 0.0697 - mae: 0.2090 - val_loss: 0.0077 - val_mae: 0.0693
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0864 - mae: 0.2281
8/8 [==============================] - 0s 2ms/step - loss: 0.0776 - mae: 0.2215

8/8 [==============================] - 0s 29ms/step - loss: 0.0776 - mae: 0.2215 - val_loss: 0.0080 - val_mae: 0.0712
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1061 - mae: 0.2543
8/8 [==============================] - 0s 2ms/step - loss: 0.0763 - mae: 0.2183

8/8 [==============================] - 0s 29ms/step - loss: 0.0763 - mae: 0.2183 - val_loss: 0.0073 - val_mae: 0.0676
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0601 - mae: 0.1929
8/8 [==============================] - 0s 2ms/step - loss: 0.0718 - mae: 0.2157

8/8 [==============================] - 0s 28ms/step - loss: 0.0718 - mae: 0.2157 - val_loss: 0.0079 - val_mae: 0.0713
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0768 - mae: 0.2267
8/8 [==============================] - 0s 3ms/step - loss: 0.0824 - mae: 0.2307

8/8 [==============================] - 0s 27ms/step - loss: 0.0824 - mae: 0.2307 - val_loss: 0.0077 - val_mae: 0.0700
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0752 - mae: 0.2181
8/8 [==============================] - 0s 2ms/step - loss: 0.0658 - mae: 0.2053

8/8 [==============================] - 0s 26ms/step - loss: 0.0658 - mae: 0.2053 - val_loss: 0.0073 - val_mae: 0.0675
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0658 - mae: 0.2081
8/8 [==============================] - 0s 2ms/step - loss: 0.0659 - mae: 0.2019

8/8 [==============================] - 0s 27ms/step - loss: 0.0659 - mae: 0.2019 - val_loss: 0.0075 - val_mae: 0.0689
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0623 - mae: 0.2012
8/8 [==============================] - 0s 2ms/step - loss: 0.0614 - mae: 0.1987

8/8 [==============================] - 0s 27ms/step - loss: 0.0614 - mae: 0.1987 - val_loss: 0.0072 - val_mae: 0.0673
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0775 - mae: 0.2210
8/8 [==============================] - 0s 3ms/step - loss: 0.0628 - mae: 0.1992

8/8 [==============================] - 0s 29ms/step - loss: 0.0628 - mae: 0.1992 - val_loss: 0.0068 - val_mae: 0.0646
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0559 - mae: 0.1866
8/8 [==============================] - 0s 2ms/step - loss: 0.0563 - mae: 0.1840

8/8 [==============================] - 0s 25ms/step - loss: 0.0563 - mae: 0.1840 - val_loss: 0.0067 - val_mae: 0.0639
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0473 - mae: 0.1768
8/8 [==============================] - 0s 2ms/step - loss: 0.0613 - mae: 0.1998

8/8 [==============================] - 0s 29ms/step - loss: 0.0613 - mae: 0.1998 - val_loss: 0.0067 - val_mae: 0.0642
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0462 - mae: 0.1735
8/8 [==============================] - 0s 2ms/step - loss: 0.0551 - mae: 0.1870

8/8 [==============================] - 0s 29ms/step - loss: 0.0551 - mae: 0.1870 - val_loss: 0.0070 - val_mae: 0.0659

Run completed: runs/2022-12-07T02-53-53Z

Training run 46/52 (flags = list(16, 32, 0.01, 50, 50, "sigmoid", "relu", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-54-16Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

1/8 [==>...........................] - ETA: 3s - loss: 0.8760 - mae: 0.8060
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0017s vs `on_train_batch_end` time: 0.0040s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.6227 - mae: 0.6649

8/8 [==============================] - 1s 107ms/step - loss: 0.6227 - mae: 0.6649 - val_loss: 0.0870 - val_mae: 0.2797
Epoch 2/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3868 - mae: 0.4951
8/8 [==============================] - 0s 2ms/step - loss: 0.3377 - mae: 0.4804

8/8 [==============================] - 0s 26ms/step - loss: 0.3377 - mae: 0.4804 - val_loss: 0.0335 - val_mae: 0.1685
Epoch 3/50

1/8 [==>...........................] - ETA: 0s - loss: 0.3403 - mae: 0.4842
8/8 [==============================] - 0s 2ms/step - loss: 0.2423 - mae: 0.3972

8/8 [==============================] - 0s 28ms/step - loss: 0.2423 - mae: 0.3972 - val_loss: 0.0289 - val_mae: 0.1561
Epoch 4/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1822 - mae: 0.3558
8/8 [==============================] - 0s 3ms/step - loss: 0.1741 - mae: 0.3368

8/8 [==============================] - 0s 29ms/step - loss: 0.1741 - mae: 0.3368 - val_loss: 0.0252 - val_mae: 0.1454
Epoch 5/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1969 - mae: 0.3715
8/8 [==============================] - 0s 2ms/step - loss: 0.1875 - mae: 0.3534

8/8 [==============================] - 0s 29ms/step - loss: 0.1875 - mae: 0.3534 - val_loss: 0.0263 - val_mae: 0.1489
Epoch 6/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1689 - mae: 0.3366
8/8 [==============================] - 0s 3ms/step - loss: 0.1658 - mae: 0.3422

8/8 [==============================] - 0s 31ms/step - loss: 0.1658 - mae: 0.3422 - val_loss: 0.0209 - val_mae: 0.1315
Epoch 7/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1487 - mae: 0.3011
8/8 [==============================] - 0s 2ms/step - loss: 0.1305 - mae: 0.2888

8/8 [==============================] - 0s 27ms/step - loss: 0.1305 - mae: 0.2888 - val_loss: 0.0148 - val_mae: 0.1071
Epoch 8/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1471 - mae: 0.3116
8/8 [==============================] - 0s 2ms/step - loss: 0.1353 - mae: 0.2952

8/8 [==============================] - 0s 29ms/step - loss: 0.1353 - mae: 0.2952 - val_loss: 0.0149 - val_mae: 0.1079
Epoch 9/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0990 - mae: 0.2437
8/8 [==============================] - 0s 2ms/step - loss: 0.1071 - mae: 0.2678

8/8 [==============================] - 0s 28ms/step - loss: 0.1071 - mae: 0.2678 - val_loss: 0.0166 - val_mae: 0.1148
Epoch 10/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1055 - mae: 0.2724
8/8 [==============================] - 0s 2ms/step - loss: 0.0975 - mae: 0.2539

8/8 [==============================] - 0s 29ms/step - loss: 0.0975 - mae: 0.2539 - val_loss: 0.0176 - val_mae: 0.1187
Epoch 11/50

1/8 [==>...........................] - ETA: 0s - loss: 0.1204 - mae: 0.2898
8/8 [==============================] - 0s 3ms/step - loss: 0.1141 - mae: 0.2778

8/8 [==============================] - 0s 29ms/step - loss: 0.1141 - mae: 0.2778 - val_loss: 0.0174 - val_mae: 0.1181
Epoch 12/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0900 - mae: 0.2363
8/8 [==============================] - 0s 2ms/step - loss: 0.1012 - mae: 0.2552

8/8 [==============================] - 0s 28ms/step - loss: 0.1012 - mae: 0.2552 - val_loss: 0.0164 - val_mae: 0.1145
Epoch 13/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0890 - mae: 0.2398
8/8 [==============================] - 0s 3ms/step - loss: 0.0888 - mae: 0.2382

8/8 [==============================] - 0s 29ms/step - loss: 0.0888 - mae: 0.2382 - val_loss: 0.0164 - val_mae: 0.1144
Epoch 14/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0681 - mae: 0.2045
8/8 [==============================] - 0s 2ms/step - loss: 0.0776 - mae: 0.2218

8/8 [==============================] - 0s 29ms/step - loss: 0.0776 - mae: 0.2218 - val_loss: 0.0173 - val_mae: 0.1180
Epoch 15/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0823 - mae: 0.2271
8/8 [==============================] - 0s 3ms/step - loss: 0.0881 - mae: 0.2386

8/8 [==============================] - 0s 27ms/step - loss: 0.0881 - mae: 0.2386 - val_loss: 0.0159 - val_mae: 0.1126
Epoch 16/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0623 - mae: 0.2050
8/8 [==============================] - 0s 3ms/step - loss: 0.0724 - mae: 0.2163

8/8 [==============================] - 0s 29ms/step - loss: 0.0724 - mae: 0.2163 - val_loss: 0.0156 - val_mae: 0.1117
Epoch 17/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0704 - mae: 0.2225
8/8 [==============================] - 0s 3ms/step - loss: 0.0627 - mae: 0.2036

8/8 [==============================] - 0s 33ms/step - loss: 0.0627 - mae: 0.2036 - val_loss: 0.0165 - val_mae: 0.1149
Epoch 18/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0671 - mae: 0.2033
8/8 [==============================] - 0s 2ms/step - loss: 0.0572 - mae: 0.1905

8/8 [==============================] - 0s 29ms/step - loss: 0.0572 - mae: 0.1905 - val_loss: 0.0169 - val_mae: 0.1167
Epoch 19/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0808 - mae: 0.2345
8/8 [==============================] - 0s 2ms/step - loss: 0.0736 - mae: 0.2226

8/8 [==============================] - 0s 28ms/step - loss: 0.0736 - mae: 0.2226 - val_loss: 0.0162 - val_mae: 0.1140
Epoch 20/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0482 - mae: 0.1821
8/8 [==============================] - 0s 2ms/step - loss: 0.0586 - mae: 0.1956

8/8 [==============================] - 0s 29ms/step - loss: 0.0586 - mae: 0.1956 - val_loss: 0.0167 - val_mae: 0.1160
Epoch 21/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0610 - mae: 0.1930
8/8 [==============================] - 0s 3ms/step - loss: 0.0623 - mae: 0.2006

8/8 [==============================] - 0s 40ms/step - loss: 0.0623 - mae: 0.2006 - val_loss: 0.0170 - val_mae: 0.1170
Epoch 22/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0507 - mae: 0.1909
8/8 [==============================] - 0s 4ms/step - loss: 0.0596 - mae: 0.2001

8/8 [==============================] - 0s 39ms/step - loss: 0.0596 - mae: 0.2001 - val_loss: 0.0176 - val_mae: 0.1189
Epoch 23/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0489 - mae: 0.1753
8/8 [==============================] - 0s 3ms/step - loss: 0.0490 - mae: 0.1750

8/8 [==============================] - 0s 35ms/step - loss: 0.0490 - mae: 0.1750 - val_loss: 0.0151 - val_mae: 0.1094
Epoch 24/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0517 - mae: 0.1794
8/8 [==============================] - 0s 3ms/step - loss: 0.0469 - mae: 0.1742

8/8 [==============================] - 0s 28ms/step - loss: 0.0469 - mae: 0.1742 - val_loss: 0.0155 - val_mae: 0.1109
Epoch 25/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0418 - mae: 0.1617
8/8 [==============================] - 0s 2ms/step - loss: 0.0423 - mae: 0.1665

8/8 [==============================] - 0s 27ms/step - loss: 0.0423 - mae: 0.1665 - val_loss: 0.0148 - val_mae: 0.1082
Epoch 26/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0368 - mae: 0.1590
8/8 [==============================] - 0s 2ms/step - loss: 0.0417 - mae: 0.1679

8/8 [==============================] - 0s 27ms/step - loss: 0.0417 - mae: 0.1679 - val_loss: 0.0137 - val_mae: 0.1034
Epoch 27/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0472 - mae: 0.1829
8/8 [==============================] - 0s 2ms/step - loss: 0.0456 - mae: 0.1753

8/8 [==============================] - 0s 26ms/step - loss: 0.0456 - mae: 0.1753 - val_loss: 0.0146 - val_mae: 0.1070
Epoch 28/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0457 - mae: 0.1783
8/8 [==============================] - 0s 2ms/step - loss: 0.0450 - mae: 0.1725

8/8 [==============================] - 0s 26ms/step - loss: 0.0450 - mae: 0.1725 - val_loss: 0.0146 - val_mae: 0.1073
Epoch 29/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0494 - mae: 0.1823
8/8 [==============================] - 0s 2ms/step - loss: 0.0437 - mae: 0.1673

8/8 [==============================] - 0s 27ms/step - loss: 0.0437 - mae: 0.1673 - val_loss: 0.0133 - val_mae: 0.1014
Epoch 30/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0473 - mae: 0.1848
8/8 [==============================] - 0s 2ms/step - loss: 0.0404 - mae: 0.1606

8/8 [==============================] - 0s 26ms/step - loss: 0.0404 - mae: 0.1606 - val_loss: 0.0137 - val_mae: 0.1031
Epoch 31/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0435 - mae: 0.1633
8/8 [==============================] - 0s 3ms/step - loss: 0.0448 - mae: 0.1674

8/8 [==============================] - 0s 27ms/step - loss: 0.0448 - mae: 0.1674 - val_loss: 0.0137 - val_mae: 0.1032
Epoch 32/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0322 - mae: 0.1456
8/8 [==============================] - 0s 2ms/step - loss: 0.0438 - mae: 0.1664

8/8 [==============================] - 0s 29ms/step - loss: 0.0438 - mae: 0.1664 - val_loss: 0.0131 - val_mae: 0.1004
Epoch 33/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0338 - mae: 0.1482
8/8 [==============================] - 0s 3ms/step - loss: 0.0385 - mae: 0.1588

8/8 [==============================] - 0s 34ms/step - loss: 0.0385 - mae: 0.1588 - val_loss: 0.0126 - val_mae: 0.0982
Epoch 34/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0319 - mae: 0.1337
8/8 [==============================] - 0s 2ms/step - loss: 0.0354 - mae: 0.1510

8/8 [==============================] - 0s 27ms/step - loss: 0.0354 - mae: 0.1510 - val_loss: 0.0129 - val_mae: 0.0991
Epoch 35/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0316 - mae: 0.1489
8/8 [==============================] - 0s 2ms/step - loss: 0.0372 - mae: 0.1548

8/8 [==============================] - 0s 29ms/step - loss: 0.0372 - mae: 0.1548 - val_loss: 0.0129 - val_mae: 0.0990
Epoch 36/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0381 - mae: 0.1558
8/8 [==============================] - 0s 3ms/step - loss: 0.0372 - mae: 0.1552

8/8 [==============================] - 0s 35ms/step - loss: 0.0372 - mae: 0.1552 - val_loss: 0.0130 - val_mae: 0.0997
Epoch 37/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0407 - mae: 0.1565
8/8 [==============================] - 0s 3ms/step - loss: 0.0311 - mae: 0.1414

8/8 [==============================] - 0s 35ms/step - loss: 0.0311 - mae: 0.1414 - val_loss: 0.0125 - val_mae: 0.0972
Epoch 38/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0281 - mae: 0.1333
8/8 [==============================] - 0s 3ms/step - loss: 0.0329 - mae: 0.1470

8/8 [==============================] - 0s 26ms/step - loss: 0.0329 - mae: 0.1470 - val_loss: 0.0121 - val_mae: 0.0950
Epoch 39/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0262 - mae: 0.1335
8/8 [==============================] - 0s 2ms/step - loss: 0.0299 - mae: 0.1409

8/8 [==============================] - 0s 26ms/step - loss: 0.0299 - mae: 0.1409 - val_loss: 0.0127 - val_mae: 0.0981
Epoch 40/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0408 - mae: 0.1618
8/8 [==============================] - 0s 2ms/step - loss: 0.0321 - mae: 0.1431

8/8 [==============================] - 0s 27ms/step - loss: 0.0321 - mae: 0.1431 - val_loss: 0.0127 - val_mae: 0.0980
Epoch 41/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0273 - mae: 0.1296
8/8 [==============================] - 0s 3ms/step - loss: 0.0324 - mae: 0.1397

8/8 [==============================] - 0s 26ms/step - loss: 0.0324 - mae: 0.1397 - val_loss: 0.0125 - val_mae: 0.0971
Epoch 42/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0335 - mae: 0.1501
8/8 [==============================] - 0s 2ms/step - loss: 0.0317 - mae: 0.1450

8/8 [==============================] - 0s 27ms/step - loss: 0.0317 - mae: 0.1450 - val_loss: 0.0118 - val_mae: 0.0931
Epoch 43/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0399 - mae: 0.1528
8/8 [==============================] - 0s 2ms/step - loss: 0.0339 - mae: 0.1479

8/8 [==============================] - 0s 26ms/step - loss: 0.0339 - mae: 0.1479 - val_loss: 0.0127 - val_mae: 0.0978
Epoch 44/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0317 - mae: 0.1398
8/8 [==============================] - 0s 3ms/step - loss: 0.0315 - mae: 0.1408

8/8 [==============================] - 0s 26ms/step - loss: 0.0315 - mae: 0.1408 - val_loss: 0.0125 - val_mae: 0.0967
Epoch 45/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0261 - mae: 0.1305
8/8 [==============================] - 0s 2ms/step - loss: 0.0287 - mae: 0.1371

8/8 [==============================] - 0s 27ms/step - loss: 0.0287 - mae: 0.1371 - val_loss: 0.0127 - val_mae: 0.0977
Epoch 46/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0256 - mae: 0.1359
8/8 [==============================] - 0s 2ms/step - loss: 0.0296 - mae: 0.1410

8/8 [==============================] - 0s 26ms/step - loss: 0.0296 - mae: 0.1410 - val_loss: 0.0133 - val_mae: 0.0997
Epoch 47/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0196 - mae: 0.1091
8/8 [==============================] - 0s 2ms/step - loss: 0.0264 - mae: 0.1283

8/8 [==============================] - 0s 29ms/step - loss: 0.0264 - mae: 0.1283 - val_loss: 0.0128 - val_mae: 0.0977
Epoch 48/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0252 - mae: 0.1281
8/8 [==============================] - 0s 3ms/step - loss: 0.0271 - mae: 0.1317

8/8 [==============================] - 0s 30ms/step - loss: 0.0271 - mae: 0.1317 - val_loss: 0.0133 - val_mae: 0.0997
Epoch 49/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0250 - mae: 0.1260
8/8 [==============================] - 0s 2ms/step - loss: 0.0270 - mae: 0.1299

8/8 [==============================] - 0s 27ms/step - loss: 0.0270 - mae: 0.1299 - val_loss: 0.0135 - val_mae: 0.1004
Epoch 50/50

1/8 [==>...........................] - ETA: 0s - loss: 0.0267 - mae: 0.1291
8/8 [==============================] - 0s 3ms/step - loss: 0.0266 - mae: 0.1283

8/8 [==============================] - 0s 27ms/step - loss: 0.0266 - mae: 0.1283 - val_loss: 0.0131 - val_mae: 0.0986

Run completed: runs/2022-12-07T02-54-16Z

Training run 47/52 (flags = list(16, 32, 0.01, 50, 30, "relu", "relu", 0.2, 0.2)) 
Using run directory runs/2022-12-07T02-54-38Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 3s - loss: 0.5204 - mae: 0.6390
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0014s vs `on_train_batch_end` time: 0.0023s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.4144 - mae: 0.5771

8/8 [==============================] - 1s 105ms/step - loss: 0.4144 - mae: 0.5771 - val_loss: 0.1531 - val_mae: 0.3647
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1780 - mae: 0.3735
8/8 [==============================] - 0s 2ms/step - loss: 0.1851 - mae: 0.3706

8/8 [==============================] - 0s 27ms/step - loss: 0.1851 - mae: 0.3706 - val_loss: 0.0800 - val_mae: 0.2527
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1441 - mae: 0.3151
8/8 [==============================] - 0s 2ms/step - loss: 0.1297 - mae: 0.3008

8/8 [==============================] - 0s 29ms/step - loss: 0.1297 - mae: 0.3008 - val_loss: 0.0529 - val_mae: 0.1927
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1321 - mae: 0.3156
8/8 [==============================] - 0s 3ms/step - loss: 0.1161 - mae: 0.2851

8/8 [==============================] - 0s 35ms/step - loss: 0.1161 - mae: 0.2851 - val_loss: 0.0422 - val_mae: 0.1636
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1015 - mae: 0.2607
8/8 [==============================] - 0s 4ms/step - loss: 0.1217 - mae: 0.2860

8/8 [==============================] - 0s 42ms/step - loss: 0.1217 - mae: 0.2860 - val_loss: 0.0397 - val_mae: 0.1574
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1391 - mae: 0.3099
8/8 [==============================] - 0s 4ms/step - loss: 0.1058 - mae: 0.2671

8/8 [==============================] - 0s 46ms/step - loss: 0.1058 - mae: 0.2671 - val_loss: 0.0348 - val_mae: 0.1424
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0874 - mae: 0.2471
8/8 [==============================] - 0s 4ms/step - loss: 0.0959 - mae: 0.2489

8/8 [==============================] - 0s 37ms/step - loss: 0.0959 - mae: 0.2489 - val_loss: 0.0327 - val_mae: 0.1383
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0864 - mae: 0.2430
8/8 [==============================] - 0s 3ms/step - loss: 0.0900 - mae: 0.2443

8/8 [==============================] - 0s 37ms/step - loss: 0.0900 - mae: 0.2443 - val_loss: 0.0292 - val_mae: 0.1273
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0730 - mae: 0.2083
8/8 [==============================] - 0s 3ms/step - loss: 0.0792 - mae: 0.2290

8/8 [==============================] - 0s 30ms/step - loss: 0.0792 - mae: 0.2290 - val_loss: 0.0282 - val_mae: 0.1251
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0652 - mae: 0.2017
8/8 [==============================] - 0s 3ms/step - loss: 0.0863 - mae: 0.2320

8/8 [==============================] - 0s 33ms/step - loss: 0.0863 - mae: 0.2320 - val_loss: 0.0266 - val_mae: 0.1203
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0894 - mae: 0.2465
8/8 [==============================] - 0s 3ms/step - loss: 0.0776 - mae: 0.2197

8/8 [==============================] - 0s 33ms/step - loss: 0.0776 - mae: 0.2197 - val_loss: 0.0252 - val_mae: 0.1172
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0841 - mae: 0.2277
8/8 [==============================] - 0s 4ms/step - loss: 0.0678 - mae: 0.2085

8/8 [==============================] - 0s 38ms/step - loss: 0.0678 - mae: 0.2085 - val_loss: 0.0232 - val_mae: 0.1129
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0661 - mae: 0.2108
8/8 [==============================] - 0s 3ms/step - loss: 0.0641 - mae: 0.2048

8/8 [==============================] - 0s 37ms/step - loss: 0.0641 - mae: 0.2048 - val_loss: 0.0222 - val_mae: 0.1112
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0613 - mae: 0.1886
8/8 [==============================] - 0s 3ms/step - loss: 0.0666 - mae: 0.2043

8/8 [==============================] - 0s 36ms/step - loss: 0.0666 - mae: 0.2043 - val_loss: 0.0209 - val_mae: 0.1080
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0639 - mae: 0.2097
8/8 [==============================] - 0s 3ms/step - loss: 0.0592 - mae: 0.1938

8/8 [==============================] - 0s 32ms/step - loss: 0.0592 - mae: 0.1938 - val_loss: 0.0203 - val_mae: 0.1066
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0540 - mae: 0.1817
8/8 [==============================] - 0s 3ms/step - loss: 0.0706 - mae: 0.2038

8/8 [==============================] - 0s 30ms/step - loss: 0.0706 - mae: 0.2038 - val_loss: 0.0201 - val_mae: 0.1081
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0635 - mae: 0.1944
8/8 [==============================] - 0s 3ms/step - loss: 0.0567 - mae: 0.1935

8/8 [==============================] - 0s 30ms/step - loss: 0.0567 - mae: 0.1935 - val_loss: 0.0189 - val_mae: 0.1035
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0505 - mae: 0.1825
8/8 [==============================] - 0s 2ms/step - loss: 0.0539 - mae: 0.1901

8/8 [==============================] - 0s 28ms/step - loss: 0.0539 - mae: 0.1901 - val_loss: 0.0180 - val_mae: 0.1011
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0649 - mae: 0.2063
8/8 [==============================] - 0s 3ms/step - loss: 0.0591 - mae: 0.1952

8/8 [==============================] - 0s 29ms/step - loss: 0.0591 - mae: 0.1952 - val_loss: 0.0180 - val_mae: 0.1019
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0587 - mae: 0.1933
8/8 [==============================] - 0s 2ms/step - loss: 0.0515 - mae: 0.1789

8/8 [==============================] - 0s 30ms/step - loss: 0.0515 - mae: 0.1789 - val_loss: 0.0174 - val_mae: 0.1010
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0442 - mae: 0.1749
8/8 [==============================] - 0s 3ms/step - loss: 0.0497 - mae: 0.1772

8/8 [==============================] - 0s 32ms/step - loss: 0.0497 - mae: 0.1772 - val_loss: 0.0163 - val_mae: 0.0978
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0529 - mae: 0.1578
8/8 [==============================] - 0s 3ms/step - loss: 0.0478 - mae: 0.1713

8/8 [==============================] - 0s 30ms/step - loss: 0.0478 - mae: 0.1713 - val_loss: 0.0158 - val_mae: 0.0965
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0353 - mae: 0.1442
8/8 [==============================] - 0s 3ms/step - loss: 0.0509 - mae: 0.1805

8/8 [==============================] - 0s 29ms/step - loss: 0.0509 - mae: 0.1805 - val_loss: 0.0159 - val_mae: 0.0978
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0403 - mae: 0.1639
8/8 [==============================] - 0s 2ms/step - loss: 0.0459 - mae: 0.1721

8/8 [==============================] - 0s 29ms/step - loss: 0.0459 - mae: 0.1721 - val_loss: 0.0154 - val_mae: 0.0964
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0497 - mae: 0.1748
8/8 [==============================] - 0s 3ms/step - loss: 0.0452 - mae: 0.1745

8/8 [==============================] - 0s 31ms/step - loss: 0.0452 - mae: 0.1745 - val_loss: 0.0150 - val_mae: 0.0949
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0609 - mae: 0.1962
8/8 [==============================] - 0s 3ms/step - loss: 0.0495 - mae: 0.1774

8/8 [==============================] - 0s 28ms/step - loss: 0.0495 - mae: 0.1774 - val_loss: 0.0146 - val_mae: 0.0940
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0326 - mae: 0.1459
8/8 [==============================] - 0s 3ms/step - loss: 0.0426 - mae: 0.1662

8/8 [==============================] - 0s 28ms/step - loss: 0.0426 - mae: 0.1662 - val_loss: 0.0146 - val_mae: 0.0942
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0480 - mae: 0.1723
8/8 [==============================] - 0s 2ms/step - loss: 0.0405 - mae: 0.1607

8/8 [==============================] - 0s 26ms/step - loss: 0.0405 - mae: 0.1607 - val_loss: 0.0141 - val_mae: 0.0922
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0449 - mae: 0.1682
8/8 [==============================] - 0s 2ms/step - loss: 0.0402 - mae: 0.1588

8/8 [==============================] - 0s 27ms/step - loss: 0.0402 - mae: 0.1588 - val_loss: 0.0140 - val_mae: 0.0930
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0343 - mae: 0.1533
8/8 [==============================] - 0s 2ms/step - loss: 0.0402 - mae: 0.1573

8/8 [==============================] - 0s 28ms/step - loss: 0.0402 - mae: 0.1573 - val_loss: 0.0141 - val_mae: 0.0946

Run completed: runs/2022-12-07T02-54-38Z

Training run 48/52 (flags = list(32, 32, 0.001, 30, 30, "relu", "tanh", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-55-33Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 4s - loss: 0.9631 - mae: 0.8401
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0015s vs `on_train_batch_end` time: 0.0026s). Check your callbacks.

13/13 [==============================] - 0s 2ms/step - loss: 0.9475 - mae: 0.8034

13/13 [==============================] - 1s 60ms/step - loss: 0.9475 - mae: 0.8034 - val_loss: 0.4685 - val_mae: 0.5889
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9897 - mae: 0.7495
13/13 [==============================] - 0s 2ms/step - loss: 0.8864 - mae: 0.7340

13/13 [==============================] - 0s 16ms/step - loss: 0.8864 - mae: 0.7340 - val_loss: 0.3585 - val_mae: 0.5054
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4477 - mae: 0.5582
13/13 [==============================] - 0s 3ms/step - loss: 0.7051 - mae: 0.6650

13/13 [==============================] - 0s 20ms/step - loss: 0.7051 - mae: 0.6650 - val_loss: 0.2698 - val_mae: 0.4342
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9728 - mae: 0.8052
13/13 [==============================] - 0s 3ms/step - loss: 0.7548 - mae: 0.6745

13/13 [==============================] - 0s 19ms/step - loss: 0.7548 - mae: 0.6745 - val_loss: 0.2164 - val_mae: 0.3847
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.9386 - mae: 0.7764
13/13 [==============================] - 0s 2ms/step - loss: 0.6200 - mae: 0.6322

13/13 [==============================] - 0s 17ms/step - loss: 0.6200 - mae: 0.6322 - val_loss: 0.1781 - val_mae: 0.3479
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5339 - mae: 0.5233
13/13 [==============================] - 0s 2ms/step - loss: 0.6766 - mae: 0.6355

13/13 [==============================] - 0s 15ms/step - loss: 0.6766 - mae: 0.6355 - val_loss: 0.1542 - val_mae: 0.3227
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7360 - mae: 0.6471
13/13 [==============================] - 0s 2ms/step - loss: 0.5814 - mae: 0.5910

13/13 [==============================] - 0s 17ms/step - loss: 0.5814 - mae: 0.5910 - val_loss: 0.1333 - val_mae: 0.3014
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3374 - mae: 0.4845
13/13 [==============================] - 0s 3ms/step - loss: 0.5247 - mae: 0.5721

13/13 [==============================] - 0s 20ms/step - loss: 0.5247 - mae: 0.5721 - val_loss: 0.1211 - val_mae: 0.2879
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5560 - mae: 0.5946
13/13 [==============================] - 0s 3ms/step - loss: 0.5050 - mae: 0.5697

13/13 [==============================] - 0s 19ms/step - loss: 0.5050 - mae: 0.5697 - val_loss: 0.1127 - val_mae: 0.2753
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5972 - mae: 0.6292
13/13 [==============================] - 0s 2ms/step - loss: 0.5287 - mae: 0.5788

13/13 [==============================] - 0s 17ms/step - loss: 0.5287 - mae: 0.5788 - val_loss: 0.1077 - val_mae: 0.2692
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5932 - mae: 0.6140
13/13 [==============================] - 0s 2ms/step - loss: 0.5703 - mae: 0.5874

13/13 [==============================] - 0s 16ms/step - loss: 0.5703 - mae: 0.5874 - val_loss: 0.1006 - val_mae: 0.2601
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4832 - mae: 0.5802
13/13 [==============================] - 0s 2ms/step - loss: 0.5068 - mae: 0.5471

13/13 [==============================] - 0s 18ms/step - loss: 0.5068 - mae: 0.5471 - val_loss: 0.0974 - val_mae: 0.2576
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7238 - mae: 0.6715
13/13 [==============================] - 0s 2ms/step - loss: 0.4571 - mae: 0.5217

13/13 [==============================] - 0s 15ms/step - loss: 0.4571 - mae: 0.5217 - val_loss: 0.0922 - val_mae: 0.2514
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6686 - mae: 0.6322
13/13 [==============================] - 0s 4ms/step - loss: 0.4484 - mae: 0.5132

13/13 [==============================] - 1s 51ms/step - loss: 0.4484 - mae: 0.5132 - val_loss: 0.0899 - val_mae: 0.2491
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5860 - mae: 0.6034
13/13 [==============================] - 0s 3ms/step - loss: 0.4653 - mae: 0.5325

13/13 [==============================] - 0s 18ms/step - loss: 0.4653 - mae: 0.5325 - val_loss: 0.0849 - val_mae: 0.2414
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5562 - mae: 0.5612
13/13 [==============================] - 0s 3ms/step - loss: 0.4529 - mae: 0.5107

13/13 [==============================] - 0s 17ms/step - loss: 0.4529 - mae: 0.5107 - val_loss: 0.0815 - val_mae: 0.2361
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4713 - mae: 0.5262
13/13 [==============================] - 0s 2ms/step - loss: 0.4584 - mae: 0.5282

13/13 [==============================] - 0s 16ms/step - loss: 0.4584 - mae: 0.5282 - val_loss: 0.0783 - val_mae: 0.2304
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4979 - mae: 0.5861
13/13 [==============================] - 0s 2ms/step - loss: 0.4157 - mae: 0.5137

13/13 [==============================] - 0s 17ms/step - loss: 0.4157 - mae: 0.5137 - val_loss: 0.0742 - val_mae: 0.2223
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.6849 - mae: 0.6183
13/13 [==============================] - 0s 2ms/step - loss: 0.4240 - mae: 0.5136

13/13 [==============================] - 0s 16ms/step - loss: 0.4240 - mae: 0.5136 - val_loss: 0.0725 - val_mae: 0.2208
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3374 - mae: 0.4356
13/13 [==============================] - 0s 2ms/step - loss: 0.3573 - mae: 0.4656

13/13 [==============================] - 0s 17ms/step - loss: 0.3573 - mae: 0.4656 - val_loss: 0.0709 - val_mae: 0.2179
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4146 - mae: 0.5390
13/13 [==============================] - 0s 2ms/step - loss: 0.3783 - mae: 0.4910

13/13 [==============================] - 0s 17ms/step - loss: 0.3783 - mae: 0.4910 - val_loss: 0.0690 - val_mae: 0.2136
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3360 - mae: 0.4765
13/13 [==============================] - 0s 2ms/step - loss: 0.3747 - mae: 0.4930

13/13 [==============================] - 0s 16ms/step - loss: 0.3747 - mae: 0.4930 - val_loss: 0.0677 - val_mae: 0.2130
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3332 - mae: 0.4486
13/13 [==============================] - 0s 2ms/step - loss: 0.3961 - mae: 0.4803

13/13 [==============================] - 0s 16ms/step - loss: 0.3961 - mae: 0.4803 - val_loss: 0.0654 - val_mae: 0.2101
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4638 - mae: 0.5127
13/13 [==============================] - 0s 2ms/step - loss: 0.3383 - mae: 0.4623

13/13 [==============================] - 0s 17ms/step - loss: 0.3383 - mae: 0.4623 - val_loss: 0.0634 - val_mae: 0.2068
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3270 - mae: 0.4683
13/13 [==============================] - 0s 3ms/step - loss: 0.3525 - mae: 0.4636

13/13 [==============================] - 0s 18ms/step - loss: 0.3525 - mae: 0.4636 - val_loss: 0.0615 - val_mae: 0.2043
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1563 - mae: 0.2971
13/13 [==============================] - 0s 3ms/step - loss: 0.3782 - mae: 0.4663

13/13 [==============================] - 0s 18ms/step - loss: 0.3782 - mae: 0.4663 - val_loss: 0.0603 - val_mae: 0.2028
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5106 - mae: 0.5075
13/13 [==============================] - 0s 2ms/step - loss: 0.3453 - mae: 0.4664

13/13 [==============================] - 0s 16ms/step - loss: 0.3453 - mae: 0.4664 - val_loss: 0.0596 - val_mae: 0.2042
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.3138 - mae: 0.4423
13/13 [==============================] - 0s 2ms/step - loss: 0.3425 - mae: 0.4724

13/13 [==============================] - 0s 17ms/step - loss: 0.3425 - mae: 0.4724 - val_loss: 0.0599 - val_mae: 0.2057
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2670 - mae: 0.4209
13/13 [==============================] - 0s 2ms/step - loss: 0.2954 - mae: 0.4257

13/13 [==============================] - 0s 17ms/step - loss: 0.2954 - mae: 0.4257 - val_loss: 0.0589 - val_mae: 0.2049
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.4458 - mae: 0.5486
13/13 [==============================] - 0s 3ms/step - loss: 0.3277 - mae: 0.4445

13/13 [==============================] - 0s 18ms/step - loss: 0.3277 - mae: 0.4445 - val_loss: 0.0583 - val_mae: 0.2049

Run completed: runs/2022-12-07T02-55-33Z

Training run 49/52 (flags = list(32, 50, 0.01, 30, 50, "relu", "tanh", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-55-52Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 7s - loss: 0.8308 - mae: 0.7865
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0016s vs `on_train_batch_end` time: 0.0031s). Check your callbacks.

13/13 [==============================] - 1s 2ms/step - loss: 0.6333 - mae: 0.6283

13/13 [==============================] - 1s 61ms/step - loss: 0.6333 - mae: 0.6283 - val_loss: 0.0400 - val_mae: 0.1416
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5382 - mae: 0.5765
13/13 [==============================] - 0s 2ms/step - loss: 0.2989 - mae: 0.4238

13/13 [==============================] - 0s 15ms/step - loss: 0.2989 - mae: 0.4238 - val_loss: 0.0247 - val_mae: 0.1273
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2037 - mae: 0.3477
13/13 [==============================] - 0s 3ms/step - loss: 0.2423 - mae: 0.3815

13/13 [==============================] - 0s 18ms/step - loss: 0.2423 - mae: 0.3815 - val_loss: 0.0179 - val_mae: 0.0959
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1977 - mae: 0.3631
13/13 [==============================] - 0s 2ms/step - loss: 0.2030 - mae: 0.3551

13/13 [==============================] - 0s 18ms/step - loss: 0.2030 - mae: 0.3551 - val_loss: 0.0202 - val_mae: 0.1152
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2304 - mae: 0.3658
13/13 [==============================] - 0s 3ms/step - loss: 0.1761 - mae: 0.3244

13/13 [==============================] - 0s 20ms/step - loss: 0.1761 - mae: 0.3244 - val_loss: 0.0160 - val_mae: 0.0962
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1498 - mae: 0.3058
13/13 [==============================] - 0s 2ms/step - loss: 0.1448 - mae: 0.2918

13/13 [==============================] - 0s 17ms/step - loss: 0.1448 - mae: 0.2918 - val_loss: 0.0158 - val_mae: 0.0941
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1240 - mae: 0.2692
13/13 [==============================] - 0s 3ms/step - loss: 0.1529 - mae: 0.3139

13/13 [==============================] - 0s 18ms/step - loss: 0.1529 - mae: 0.3139 - val_loss: 0.0134 - val_mae: 0.0906
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1181 - mae: 0.2609
13/13 [==============================] - 0s 3ms/step - loss: 0.1428 - mae: 0.3008

13/13 [==============================] - 0s 23ms/step - loss: 0.1428 - mae: 0.3008 - val_loss: 0.0114 - val_mae: 0.0794
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1543 - mae: 0.3216
13/13 [==============================] - 0s 3ms/step - loss: 0.1340 - mae: 0.2907

13/13 [==============================] - 0s 19ms/step - loss: 0.1340 - mae: 0.2907 - val_loss: 0.0109 - val_mae: 0.0788
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0700 - mae: 0.2150
13/13 [==============================] - 0s 2ms/step - loss: 0.1067 - mae: 0.2603

13/13 [==============================] - 0s 17ms/step - loss: 0.1067 - mae: 0.2603 - val_loss: 0.0110 - val_mae: 0.0843
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0734 - mae: 0.2348
13/13 [==============================] - 0s 3ms/step - loss: 0.0873 - mae: 0.2314

13/13 [==============================] - 0s 20ms/step - loss: 0.0873 - mae: 0.2314 - val_loss: 0.0106 - val_mae: 0.0809
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0449 - mae: 0.1548
13/13 [==============================] - 0s 3ms/step - loss: 0.0979 - mae: 0.2413

13/13 [==============================] - 0s 20ms/step - loss: 0.0979 - mae: 0.2413 - val_loss: 0.0097 - val_mae: 0.0739
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0500 - mae: 0.1699
13/13 [==============================] - 0s 2ms/step - loss: 0.0941 - mae: 0.2401

13/13 [==============================] - 0s 29ms/step - loss: 0.0941 - mae: 0.2401 - val_loss: 0.0094 - val_mae: 0.0762
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1061 - mae: 0.2625
13/13 [==============================] - 0s 3ms/step - loss: 0.0982 - mae: 0.2482

13/13 [==============================] - 0s 18ms/step - loss: 0.0982 - mae: 0.2482 - val_loss: 0.0091 - val_mae: 0.0830
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0804 - mae: 0.2361
13/13 [==============================] - 0s 3ms/step - loss: 0.0973 - mae: 0.2431

13/13 [==============================] - 0s 17ms/step - loss: 0.0973 - mae: 0.2431 - val_loss: 0.0085 - val_mae: 0.0803
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0725 - mae: 0.2156
13/13 [==============================] - 0s 3ms/step - loss: 0.0735 - mae: 0.2173

13/13 [==============================] - 0s 18ms/step - loss: 0.0735 - mae: 0.2173 - val_loss: 0.0075 - val_mae: 0.0682
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0459 - mae: 0.1646
13/13 [==============================] - 0s 2ms/step - loss: 0.0721 - mae: 0.2108

13/13 [==============================] - 0s 17ms/step - loss: 0.0721 - mae: 0.2108 - val_loss: 0.0078 - val_mae: 0.0610
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0486 - mae: 0.1854
13/13 [==============================] - 0s 3ms/step - loss: 0.0753 - mae: 0.2166

13/13 [==============================] - 0s 17ms/step - loss: 0.0753 - mae: 0.2166 - val_loss: 0.0078 - val_mae: 0.0601
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0740 - mae: 0.2235
13/13 [==============================] - 0s 2ms/step - loss: 0.0694 - mae: 0.2111

13/13 [==============================] - 0s 17ms/step - loss: 0.0694 - mae: 0.2111 - val_loss: 0.0081 - val_mae: 0.0765
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0873 - mae: 0.2257
13/13 [==============================] - 0s 3ms/step - loss: 0.0748 - mae: 0.2141

13/13 [==============================] - 0s 20ms/step - loss: 0.0748 - mae: 0.2141 - val_loss: 0.0078 - val_mae: 0.0582
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0879 - mae: 0.2430
13/13 [==============================] - 0s 4ms/step - loss: 0.0580 - mae: 0.1959

13/13 [==============================] - 0s 23ms/step - loss: 0.0580 - mae: 0.1959 - val_loss: 0.0070 - val_mae: 0.0567
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0496 - mae: 0.1703
13/13 [==============================] - 0s 3ms/step - loss: 0.0568 - mae: 0.1833

13/13 [==============================] - 0s 21ms/step - loss: 0.0568 - mae: 0.1833 - val_loss: 0.0064 - val_mae: 0.0619
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0522 - mae: 0.1948
13/13 [==============================] - 0s 2ms/step - loss: 0.0508 - mae: 0.1747

13/13 [==============================] - 0s 17ms/step - loss: 0.0508 - mae: 0.1747 - val_loss: 0.0063 - val_mae: 0.0655
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0332 - mae: 0.1368
13/13 [==============================] - 0s 3ms/step - loss: 0.0544 - mae: 0.1825

13/13 [==============================] - 0s 18ms/step - loss: 0.0544 - mae: 0.1825 - val_loss: 0.0063 - val_mae: 0.0660
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0628 - mae: 0.2035
13/13 [==============================] - 0s 3ms/step - loss: 0.0594 - mae: 0.1945

13/13 [==============================] - 0s 20ms/step - loss: 0.0594 - mae: 0.1945 - val_loss: 0.0060 - val_mae: 0.0599
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0443 - mae: 0.1686
13/13 [==============================] - 0s 3ms/step - loss: 0.0532 - mae: 0.1758

13/13 [==============================] - 0s 18ms/step - loss: 0.0532 - mae: 0.1758 - val_loss: 0.0058 - val_mae: 0.0629
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0346 - mae: 0.1500
13/13 [==============================] - 0s 3ms/step - loss: 0.0393 - mae: 0.1573

13/13 [==============================] - 0s 19ms/step - loss: 0.0393 - mae: 0.1573 - val_loss: 0.0059 - val_mae: 0.0653
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0512 - mae: 0.1710
13/13 [==============================] - 0s 3ms/step - loss: 0.0510 - mae: 0.1779

13/13 [==============================] - 0s 21ms/step - loss: 0.0510 - mae: 0.1779 - val_loss: 0.0053 - val_mae: 0.0562
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0323 - mae: 0.1434
13/13 [==============================] - 0s 3ms/step - loss: 0.0418 - mae: 0.1595

13/13 [==============================] - 0s 20ms/step - loss: 0.0418 - mae: 0.1595 - val_loss: 0.0052 - val_mae: 0.0580
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0347 - mae: 0.1414
13/13 [==============================] - 0s 3ms/step - loss: 0.0414 - mae: 0.1560

13/13 [==============================] - 0s 21ms/step - loss: 0.0414 - mae: 0.1560 - val_loss: 0.0053 - val_mae: 0.0579
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0371 - mae: 0.1629
13/13 [==============================] - 0s 3ms/step - loss: 0.0417 - mae: 0.1597

13/13 [==============================] - 0s 18ms/step - loss: 0.0417 - mae: 0.1597 - val_loss: 0.0053 - val_mae: 0.0577
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0508 - mae: 0.1898
13/13 [==============================] - 0s 3ms/step - loss: 0.0331 - mae: 0.1436

13/13 [==============================] - 0s 22ms/step - loss: 0.0331 - mae: 0.1436 - val_loss: 0.0051 - val_mae: 0.0565
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0297 - mae: 0.1334
13/13 [==============================] - 0s 3ms/step - loss: 0.0406 - mae: 0.1604

13/13 [==============================] - 0s 24ms/step - loss: 0.0406 - mae: 0.1604 - val_loss: 0.0052 - val_mae: 0.0604
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0477 - mae: 0.1768
12/13 [==========================>...] - ETA: 0s - loss: 0.0366 - mae: 0.1488
13/13 [==============================] - 0s 6ms/step - loss: 0.0369 - mae: 0.1496

13/13 [==============================] - 0s 23ms/step - loss: 0.0369 - mae: 0.1496 - val_loss: 0.0062 - val_mae: 0.0698
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0354 - mae: 0.1421
13/13 [==============================] - 0s 3ms/step - loss: 0.0347 - mae: 0.1451

13/13 [==============================] - 0s 21ms/step - loss: 0.0347 - mae: 0.1451 - val_loss: 0.0061 - val_mae: 0.0687
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0399 - mae: 0.1724
13/13 [==============================] - 0s 3ms/step - loss: 0.0373 - mae: 0.1523

13/13 [==============================] - 0s 22ms/step - loss: 0.0373 - mae: 0.1523 - val_loss: 0.0057 - val_mae: 0.0655
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0420 - mae: 0.1700
13/13 [==============================] - 0s 3ms/step - loss: 0.0354 - mae: 0.1488

13/13 [==============================] - 0s 20ms/step - loss: 0.0354 - mae: 0.1488 - val_loss: 0.0054 - val_mae: 0.0616
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0271 - mae: 0.1381
13/13 [==============================] - 0s 3ms/step - loss: 0.0277 - mae: 0.1293

13/13 [==============================] - 0s 19ms/step - loss: 0.0277 - mae: 0.1293 - val_loss: 0.0051 - val_mae: 0.0573
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0355 - mae: 0.1538
13/13 [==============================] - 0s 3ms/step - loss: 0.0332 - mae: 0.1442

13/13 [==============================] - 0s 19ms/step - loss: 0.0332 - mae: 0.1442 - val_loss: 0.0047 - val_mae: 0.0532
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0249 - mae: 0.1296
13/13 [==============================] - 0s 3ms/step - loss: 0.0319 - mae: 0.1409

13/13 [==============================] - 0s 20ms/step - loss: 0.0319 - mae: 0.1409 - val_loss: 0.0050 - val_mae: 0.0545
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0282 - mae: 0.1280
13/13 [==============================] - 0s 4ms/step - loss: 0.0335 - mae: 0.1397

13/13 [==============================] - 0s 25ms/step - loss: 0.0335 - mae: 0.1397 - val_loss: 0.0057 - val_mae: 0.0655
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0188 - mae: 0.1150
13/13 [==============================] - 0s 3ms/step - loss: 0.0292 - mae: 0.1358

13/13 [==============================] - 0s 19ms/step - loss: 0.0292 - mae: 0.1358 - val_loss: 0.0051 - val_mae: 0.0599
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0274 - mae: 0.1319
13/13 [==============================] - 0s 2ms/step - loss: 0.0271 - mae: 0.1308

13/13 [==============================] - 0s 17ms/step - loss: 0.0271 - mae: 0.1308 - val_loss: 0.0047 - val_mae: 0.0547
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0177 - mae: 0.1110
13/13 [==============================] - 0s 3ms/step - loss: 0.0268 - mae: 0.1328

13/13 [==============================] - 0s 20ms/step - loss: 0.0268 - mae: 0.1328 - val_loss: 0.0047 - val_mae: 0.0520
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0314 - mae: 0.1442
13/13 [==============================] - 0s 4ms/step - loss: 0.0277 - mae: 0.1322

13/13 [==============================] - 0s 27ms/step - loss: 0.0277 - mae: 0.1322 - val_loss: 0.0047 - val_mae: 0.0558
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0403 - mae: 0.1533
13/13 [==============================] - 0s 4ms/step - loss: 0.0275 - mae: 0.1309

13/13 [==============================] - 0s 23ms/step - loss: 0.0275 - mae: 0.1309 - val_loss: 0.0047 - val_mae: 0.0565
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0140 - mae: 0.0968
13/13 [==============================] - 0s 2ms/step - loss: 0.0237 - mae: 0.1205

13/13 [==============================] - 0s 17ms/step - loss: 0.0237 - mae: 0.1205 - val_loss: 0.0047 - val_mae: 0.0578
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0223 - mae: 0.1126
13/13 [==============================] - 0s 2ms/step - loss: 0.0261 - mae: 0.1275

13/13 [==============================] - 0s 17ms/step - loss: 0.0261 - mae: 0.1275 - val_loss: 0.0045 - val_mae: 0.0537
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0213 - mae: 0.1162
13/13 [==============================] - 0s 3ms/step - loss: 0.0248 - mae: 0.1223

13/13 [==============================] - 0s 22ms/step - loss: 0.0248 - mae: 0.1223 - val_loss: 0.0044 - val_mae: 0.0524
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.0208 - mae: 0.1058
13/13 [==============================] - 0s 3ms/step - loss: 0.0248 - mae: 0.1254

13/13 [==============================] - 0s 22ms/step - loss: 0.0248 - mae: 0.1254 - val_loss: 0.0043 - val_mae: 0.0501

Run completed: runs/2022-12-07T02-55-52Z

Training run 50/52 (flags = list(64, 10, 0.01, 30, 30, "sigmoid", "sigmoid", 0.6, 0.6)) 
Using run directory runs/2022-12-07T02-56-18Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

 1/13 [=>............................] - ETA: 4s - loss: 2.1638 - mae: 1.1959
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0018s vs `on_train_batch_end` time: 0.0030s). Check your callbacks.

13/13 [==============================] - 0s 3ms/step - loss: 1.1893 - mae: 0.8671

13/13 [==============================] - 1s 63ms/step - loss: 1.1893 - mae: 0.8671 - val_loss: 0.0760 - val_mae: 0.2523
Epoch 2/30

 1/13 [=>............................] - ETA: 0s - loss: 0.7074 - mae: 0.7121
13/13 [==============================] - 0s 2ms/step - loss: 0.6210 - mae: 0.6558

13/13 [==============================] - 0s 17ms/step - loss: 0.6210 - mae: 0.6558 - val_loss: 0.0295 - val_mae: 0.1453
Epoch 3/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5375 - mae: 0.6139
13/13 [==============================] - 0s 3ms/step - loss: 0.4704 - mae: 0.5641

13/13 [==============================] - 0s 23ms/step - loss: 0.4704 - mae: 0.5641 - val_loss: 0.0212 - val_mae: 0.1147
Epoch 4/30

 1/13 [=>............................] - ETA: 0s - loss: 0.5608 - mae: 0.6076
13/13 [==============================] - 0s 3ms/step - loss: 0.4009 - mae: 0.5135

13/13 [==============================] - 0s 21ms/step - loss: 0.4009 - mae: 0.5135 - val_loss: 0.0185 - val_mae: 0.1056
Epoch 5/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2713 - mae: 0.4163
13/13 [==============================] - 0s 3ms/step - loss: 0.2657 - mae: 0.4177

13/13 [==============================] - 0s 17ms/step - loss: 0.2657 - mae: 0.4177 - val_loss: 0.0207 - val_mae: 0.1123
Epoch 6/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1752 - mae: 0.3339
13/13 [==============================] - 0s 3ms/step - loss: 0.2433 - mae: 0.4071

13/13 [==============================] - 0s 17ms/step - loss: 0.2433 - mae: 0.4071 - val_loss: 0.0182 - val_mae: 0.1052
Epoch 7/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2049 - mae: 0.3624
13/13 [==============================] - 0s 3ms/step - loss: 0.2306 - mae: 0.3899

13/13 [==============================] - 0s 20ms/step - loss: 0.2306 - mae: 0.3899 - val_loss: 0.0210 - val_mae: 0.1130
Epoch 8/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1157 - mae: 0.2809
13/13 [==============================] - 0s 3ms/step - loss: 0.1558 - mae: 0.3116

13/13 [==============================] - 0s 21ms/step - loss: 0.1558 - mae: 0.3116 - val_loss: 0.0223 - val_mae: 0.1170
Epoch 9/30

 1/13 [=>............................] - ETA: 0s - loss: 0.2076 - mae: 0.3850
13/13 [==============================] - 0s 3ms/step - loss: 0.1525 - mae: 0.3188

13/13 [==============================] - 0s 18ms/step - loss: 0.1525 - mae: 0.3188 - val_loss: 0.0172 - val_mae: 0.1029
Epoch 10/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1745 - mae: 0.3154
13/13 [==============================] - 0s 3ms/step - loss: 0.1292 - mae: 0.2944

13/13 [==============================] - 0s 17ms/step - loss: 0.1292 - mae: 0.2944 - val_loss: 0.0175 - val_mae: 0.1038
Epoch 11/30

 1/13 [=>............................] - ETA: 0s - loss: 0.1104 - mae: 0.2677
13/13 [==============================] - 0s 2ms/step - loss: 0.1127 - mae: 0.2757

13/13 [==============================] - 0s 17ms/step - loss: 0.1127 - mae: 0.2757 - val_loss: 0.0188 - val_mae: 0.1078
Epoch 12/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0751 - mae: 0.2407
13/13 [==============================] - 0s 3ms/step - loss: 0.0902 - mae: 0.2452

13/13 [==============================] - 0s 17ms/step - loss: 0.0902 - mae: 0.2452 - val_loss: 0.0174 - val_mae: 0.1036
Epoch 13/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0650 - mae: 0.2054
13/13 [==============================] - 0s 3ms/step - loss: 0.0790 - mae: 0.2248

13/13 [==============================] - 0s 17ms/step - loss: 0.0790 - mae: 0.2248 - val_loss: 0.0184 - val_mae: 0.1067
Epoch 14/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0671 - mae: 0.1940
13/13 [==============================] - 0s 3ms/step - loss: 0.0745 - mae: 0.2155

13/13 [==============================] - 0s 17ms/step - loss: 0.0745 - mae: 0.2155 - val_loss: 0.0174 - val_mae: 0.1040
Epoch 15/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0562 - mae: 0.1776
13/13 [==============================] - 0s 3ms/step - loss: 0.0716 - mae: 0.2144

13/13 [==============================] - 0s 17ms/step - loss: 0.0716 - mae: 0.2144 - val_loss: 0.0172 - val_mae: 0.1037
Epoch 16/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0721 - mae: 0.2400
13/13 [==============================] - 0s 2ms/step - loss: 0.0615 - mae: 0.2048

13/13 [==============================] - 0s 17ms/step - loss: 0.0615 - mae: 0.2048 - val_loss: 0.0172 - val_mae: 0.1036
Epoch 17/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0507 - mae: 0.1790
13/13 [==============================] - 0s 3ms/step - loss: 0.0593 - mae: 0.1968

13/13 [==============================] - 0s 17ms/step - loss: 0.0593 - mae: 0.1968 - val_loss: 0.0171 - val_mae: 0.1034
Epoch 18/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0663 - mae: 0.1998
13/13 [==============================] - 0s 2ms/step - loss: 0.0579 - mae: 0.1940

13/13 [==============================] - 0s 17ms/step - loss: 0.0579 - mae: 0.1940 - val_loss: 0.0172 - val_mae: 0.1040
Epoch 19/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0680 - mae: 0.2092
13/13 [==============================] - 0s 3ms/step - loss: 0.0495 - mae: 0.1785

13/13 [==============================] - 0s 17ms/step - loss: 0.0495 - mae: 0.1785 - val_loss: 0.0174 - val_mae: 0.1042
Epoch 20/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0505 - mae: 0.1774
13/13 [==============================] - 0s 2ms/step - loss: 0.0501 - mae: 0.1842

13/13 [==============================] - 0s 17ms/step - loss: 0.0501 - mae: 0.1842 - val_loss: 0.0180 - val_mae: 0.1057
Epoch 21/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0423 - mae: 0.1583
13/13 [==============================] - 0s 2ms/step - loss: 0.0441 - mae: 0.1687

13/13 [==============================] - 0s 17ms/step - loss: 0.0441 - mae: 0.1687 - val_loss: 0.0178 - val_mae: 0.1054
Epoch 22/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0415 - mae: 0.1724
13/13 [==============================] - 0s 2ms/step - loss: 0.0393 - mae: 0.1600

13/13 [==============================] - 0s 17ms/step - loss: 0.0393 - mae: 0.1600 - val_loss: 0.0174 - val_mae: 0.1046
Epoch 23/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0404 - mae: 0.1536
13/13 [==============================] - 0s 3ms/step - loss: 0.0394 - mae: 0.1598

13/13 [==============================] - 0s 17ms/step - loss: 0.0394 - mae: 0.1598 - val_loss: 0.0174 - val_mae: 0.1047
Epoch 24/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0346 - mae: 0.1286
13/13 [==============================] - 0s 3ms/step - loss: 0.0391 - mae: 0.1582

13/13 [==============================] - 0s 18ms/step - loss: 0.0391 - mae: 0.1582 - val_loss: 0.0176 - val_mae: 0.1053
Epoch 25/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0270 - mae: 0.1328
13/13 [==============================] - 0s 2ms/step - loss: 0.0329 - mae: 0.1420

13/13 [==============================] - 0s 17ms/step - loss: 0.0329 - mae: 0.1420 - val_loss: 0.0176 - val_mae: 0.1051
Epoch 26/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0262 - mae: 0.1392
13/13 [==============================] - 0s 3ms/step - loss: 0.0320 - mae: 0.1451

13/13 [==============================] - 0s 17ms/step - loss: 0.0320 - mae: 0.1451 - val_loss: 0.0189 - val_mae: 0.1087
Epoch 27/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0413 - mae: 0.1788
13/13 [==============================] - 0s 3ms/step - loss: 0.0299 - mae: 0.1407

13/13 [==============================] - 0s 19ms/step - loss: 0.0299 - mae: 0.1407 - val_loss: 0.0176 - val_mae: 0.1050
Epoch 28/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0257 - mae: 0.1264
13/13 [==============================] - 0s 3ms/step - loss: 0.0272 - mae: 0.1285

13/13 [==============================] - 0s 18ms/step - loss: 0.0272 - mae: 0.1285 - val_loss: 0.0174 - val_mae: 0.1047
Epoch 29/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0245 - mae: 0.1175
13/13 [==============================] - 0s 3ms/step - loss: 0.0269 - mae: 0.1328

13/13 [==============================] - 0s 18ms/step - loss: 0.0269 - mae: 0.1328 - val_loss: 0.0176 - val_mae: 0.1052
Epoch 30/30

 1/13 [=>............................] - ETA: 0s - loss: 0.0250 - mae: 0.1287
13/13 [==============================] - 0s 3ms/step - loss: 0.0301 - mae: 0.1401

13/13 [==============================] - 0s 17ms/step - loss: 0.0301 - mae: 0.1401 - val_loss: 0.0175 - val_mae: 0.1049

Run completed: runs/2022-12-07T02-56-18Z

Training run 51/52 (flags = list(64, 10, 0.01, 50, 30, "sigmoid", "relu", 0.2, 0.6)) 
Using run directory runs/2022-12-07T02-56-39Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/30

1/8 [==>...........................] - ETA: 3s - loss: 1.7889 - mae: 1.2014
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0017s vs `on_train_batch_end` time: 0.0028s). Check your callbacks.

8/8 [==============================] - 0s 2ms/step - loss: 0.7747 - mae: 0.7631

8/8 [==============================] - 1s 110ms/step - loss: 0.7747 - mae: 0.7631 - val_loss: 0.1106 - val_mae: 0.3065
Epoch 2/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3319 - mae: 0.5132
8/8 [==============================] - 0s 2ms/step - loss: 0.3733 - mae: 0.5350

8/8 [==============================] - 0s 28ms/step - loss: 0.3733 - mae: 0.5350 - val_loss: 0.1362 - val_mae: 0.3461
Epoch 3/30

1/8 [==>...........................] - ETA: 0s - loss: 0.3963 - mae: 0.5605
8/8 [==============================] - 0s 4ms/step - loss: 0.3144 - mae: 0.4956

8/8 [==============================] - 0s 46ms/step - loss: 0.3144 - mae: 0.4956 - val_loss: 0.1120 - val_mae: 0.3105
Epoch 4/30

1/8 [==>...........................] - ETA: 0s - loss: 0.2572 - mae: 0.4543
8/8 [==============================] - 0s 3ms/step - loss: 0.2242 - mae: 0.4169

8/8 [==============================] - 0s 39ms/step - loss: 0.2242 - mae: 0.4169 - val_loss: 0.1047 - val_mae: 0.2996
Epoch 5/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1887 - mae: 0.3791
8/8 [==============================] - 0s 3ms/step - loss: 0.2080 - mae: 0.4037

8/8 [==============================] - 0s 33ms/step - loss: 0.2080 - mae: 0.4037 - val_loss: 0.0904 - val_mae: 0.2750
Epoch 6/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1754 - mae: 0.3559
8/8 [==============================] - 0s 2ms/step - loss: 0.1638 - mae: 0.3460

8/8 [==============================] - 0s 33ms/step - loss: 0.1638 - mae: 0.3460 - val_loss: 0.0772 - val_mae: 0.2513
Epoch 7/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1616 - mae: 0.3551
8/8 [==============================] - 0s 4ms/step - loss: 0.1383 - mae: 0.3149

8/8 [==============================] - 0s 47ms/step - loss: 0.1383 - mae: 0.3149 - val_loss: 0.0694 - val_mae: 0.2372
Epoch 8/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1138 - mae: 0.2945
8/8 [==============================] - 0s 3ms/step - loss: 0.1199 - mae: 0.2917

8/8 [==============================] - 0s 34ms/step - loss: 0.1199 - mae: 0.2917 - val_loss: 0.0659 - val_mae: 0.2302
Epoch 9/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1199 - mae: 0.2961
8/8 [==============================] - 0s 3ms/step - loss: 0.1251 - mae: 0.3026

8/8 [==============================] - 0s 31ms/step - loss: 0.1251 - mae: 0.3026 - val_loss: 0.0605 - val_mae: 0.2190
Epoch 10/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0733 - mae: 0.2303
8/8 [==============================] - 0s 3ms/step - loss: 0.0942 - mae: 0.2576

8/8 [==============================] - 0s 28ms/step - loss: 0.0942 - mae: 0.2576 - val_loss: 0.0558 - val_mae: 0.2084
Epoch 11/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0830 - mae: 0.2558
8/8 [==============================] - 0s 3ms/step - loss: 0.0818 - mae: 0.2444

8/8 [==============================] - 0s 30ms/step - loss: 0.0818 - mae: 0.2444 - val_loss: 0.0501 - val_mae: 0.1958
Epoch 12/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0562 - mae: 0.1989
8/8 [==============================] - 0s 3ms/step - loss: 0.0750 - mae: 0.2246

8/8 [==============================] - 0s 37ms/step - loss: 0.0750 - mae: 0.2246 - val_loss: 0.0444 - val_mae: 0.1824
Epoch 13/30

1/8 [==>...........................] - ETA: 0s - loss: 0.1181 - mae: 0.2629
8/8 [==============================] - 0s 3ms/step - loss: 0.0735 - mae: 0.2199

8/8 [==============================] - 0s 33ms/step - loss: 0.0735 - mae: 0.2199 - val_loss: 0.0391 - val_mae: 0.1675
Epoch 14/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0775 - mae: 0.2230
8/8 [==============================] - 0s 3ms/step - loss: 0.0697 - mae: 0.2131

8/8 [==============================] - 0s 29ms/step - loss: 0.0697 - mae: 0.2131 - val_loss: 0.0388 - val_mae: 0.1646
Epoch 15/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0620 - mae: 0.2011
8/8 [==============================] - 0s 3ms/step - loss: 0.0567 - mae: 0.1935

8/8 [==============================] - 0s 29ms/step - loss: 0.0567 - mae: 0.1935 - val_loss: 0.0339 - val_mae: 0.1505
Epoch 16/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0602 - mae: 0.1969
8/8 [==============================] - 0s 3ms/step - loss: 0.0599 - mae: 0.1982

8/8 [==============================] - 0s 36ms/step - loss: 0.0599 - mae: 0.1982 - val_loss: 0.0337 - val_mae: 0.1478
Epoch 17/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0533 - mae: 0.1766
8/8 [==============================] - 0s 3ms/step - loss: 0.0487 - mae: 0.1774

8/8 [==============================] - 0s 37ms/step - loss: 0.0487 - mae: 0.1774 - val_loss: 0.0322 - val_mae: 0.1424
Epoch 18/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0390 - mae: 0.1645
8/8 [==============================] - 0s 3ms/step - loss: 0.0431 - mae: 0.1691

8/8 [==============================] - 0s 29ms/step - loss: 0.0431 - mae: 0.1691 - val_loss: 0.0306 - val_mae: 0.1378
Epoch 19/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0377 - mae: 0.1625
8/8 [==============================] - 0s 2ms/step - loss: 0.0430 - mae: 0.1721

8/8 [==============================] - 0s 27ms/step - loss: 0.0430 - mae: 0.1721 - val_loss: 0.0296 - val_mae: 0.1349
Epoch 20/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0345 - mae: 0.1592
8/8 [==============================] - 0s 3ms/step - loss: 0.0396 - mae: 0.1602

8/8 [==============================] - 0s 38ms/step - loss: 0.0396 - mae: 0.1602 - val_loss: 0.0293 - val_mae: 0.1342
Epoch 21/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0336 - mae: 0.1483
8/8 [==============================] - 0s 3ms/step - loss: 0.0352 - mae: 0.1530

8/8 [==============================] - 0s 35ms/step - loss: 0.0352 - mae: 0.1530 - val_loss: 0.0282 - val_mae: 0.1314
Epoch 22/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0411 - mae: 0.1726
8/8 [==============================] - 0s 3ms/step - loss: 0.0330 - mae: 0.1505

8/8 [==============================] - 0s 31ms/step - loss: 0.0330 - mae: 0.1505 - val_loss: 0.0267 - val_mae: 0.1288
Epoch 23/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0304 - mae: 0.1385
8/8 [==============================] - 0s 2ms/step - loss: 0.0354 - mae: 0.1530

8/8 [==============================] - 0s 28ms/step - loss: 0.0354 - mae: 0.1530 - val_loss: 0.0261 - val_mae: 0.1275
Epoch 24/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0183 - mae: 0.1066
8/8 [==============================] - 0s 3ms/step - loss: 0.0332 - mae: 0.1468

8/8 [==============================] - 0s 30ms/step - loss: 0.0332 - mae: 0.1468 - val_loss: 0.0251 - val_mae: 0.1255
Epoch 25/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0247 - mae: 0.1264
8/8 [==============================] - 0s 3ms/step - loss: 0.0338 - mae: 0.1445

8/8 [==============================] - 0s 39ms/step - loss: 0.0338 - mae: 0.1445 - val_loss: 0.0242 - val_mae: 0.1234
Epoch 26/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0336 - mae: 0.1541
8/8 [==============================] - 0s 3ms/step - loss: 0.0262 - mae: 0.1321

8/8 [==============================] - 0s 35ms/step - loss: 0.0262 - mae: 0.1321 - val_loss: 0.0234 - val_mae: 0.1215
Epoch 27/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0282 - mae: 0.1339
8/8 [==============================] - 0s 3ms/step - loss: 0.0285 - mae: 0.1397

8/8 [==============================] - 0s 28ms/step - loss: 0.0285 - mae: 0.1397 - val_loss: 0.0231 - val_mae: 0.1207
Epoch 28/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0407 - mae: 0.1672
8/8 [==============================] - 0s 3ms/step - loss: 0.0299 - mae: 0.1386

8/8 [==============================] - 0s 30ms/step - loss: 0.0299 - mae: 0.1386 - val_loss: 0.0227 - val_mae: 0.1197
Epoch 29/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0320 - mae: 0.1486
8/8 [==============================] - 0s 3ms/step - loss: 0.0271 - mae: 0.1323

8/8 [==============================] - 0s 38ms/step - loss: 0.0271 - mae: 0.1323 - val_loss: 0.0223 - val_mae: 0.1189
Epoch 30/30

1/8 [==>...........................] - ETA: 0s - loss: 0.0292 - mae: 0.1428
8/8 [==============================] - 0s 3ms/step - loss: 0.0271 - mae: 0.1310

8/8 [==============================] - 0s 35ms/step - loss: 0.0271 - mae: 0.1310 - val_loss: 0.0222 - val_mae: 0.1185

Run completed: runs/2022-12-07T02-56-39Z

Training run 52/52 (flags = list(64, 50, 0.001, 30, 50, "relu", "tanh", 0.6, 0.2)) 
Using run directory runs/2022-12-07T02-57-01Z

> FLAGS <- flags(
+   flag_numeric("nodes", 128),
+   flag_numeric("nodes2", 32),
+   flag_numeric("batch_size", 100),
+   flag_string("activation", " ..." ... [TRUNCATED] 

> model =keras_model_sequential() 

> model %>%
+   layer_dense(units = FLAGS$nodes, activation = FLAGS$activation) %>%
+   layer_dropout(FLAGS$dropout)%>%
+   layer_dense(units = FLAGS$ .... [TRUNCATED] 

> model %>% compile(
+   optimizer = optimizer_sgd(lr=FLAGS$learning_rate), 
+   loss = 'mse',
+   metrics = list("mae"))
Warning: the `lr` argument has been renamed to `learning_rate`.

> model %>% fit(
+   as.matrix(new_train_trim), grad_train1y, epochs = FLAGS$epochs
+   ,batch_size= FLAGS$batch_size, 
+   validation_data=list(as.ma .... [TRUNCATED] 
Epoch 1/50

 1/13 [=>............................] - ETA: 6s - loss: 0.6781 - mae: 0.6903
WARNING:tensorflow:Callback method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0018s vs `on_train_batch_end` time: 0.0035s). Check your callbacks.

13/13 [==============================] - 1s 3ms/step - loss: 0.8034 - mae: 0.7641

13/13 [==============================] - 1s 66ms/step - loss: 0.8034 - mae: 0.7641 - val_loss: 0.4186 - val_mae: 0.6287
Epoch 2/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4984 - mae: 0.5764
13/13 [==============================] - 0s 3ms/step - loss: 0.5630 - mae: 0.6355

13/13 [==============================] - 0s 20ms/step - loss: 0.5630 - mae: 0.6355 - val_loss: 0.2842 - val_mae: 0.5071
Epoch 3/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4887 - mae: 0.6122
13/13 [==============================] - 0s 4ms/step - loss: 0.4826 - mae: 0.5713

13/13 [==============================] - 0s 23ms/step - loss: 0.4826 - mae: 0.5713 - val_loss: 0.1969 - val_mae: 0.4116
Epoch 4/50

 1/13 [=>............................] - ETA: 0s - loss: 0.5404 - mae: 0.6206
13/13 [==============================] - 0s 3ms/step - loss: 0.3957 - mae: 0.5240

13/13 [==============================] - 0s 22ms/step - loss: 0.3957 - mae: 0.5240 - val_loss: 0.1349 - val_mae: 0.3371
Epoch 5/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3348 - mae: 0.4712
13/13 [==============================] - 0s 3ms/step - loss: 0.3683 - mae: 0.4861

13/13 [==============================] - 0s 21ms/step - loss: 0.3683 - mae: 0.4861 - val_loss: 0.0999 - val_mae: 0.2855
Epoch 6/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3568 - mae: 0.4557
10/13 [======================>.......] - ETA: 0s - loss: 0.3744 - mae: 0.4744
13/13 [==============================] - 0s 7ms/step - loss: 0.3578 - mae: 0.4648

13/13 [==============================] - 0s 32ms/step - loss: 0.3578 - mae: 0.4648 - val_loss: 0.0814 - val_mae: 0.2556
Epoch 7/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1987 - mae: 0.3687
13/13 [==============================] - 0s 3ms/step - loss: 0.3240 - mae: 0.4688

13/13 [==============================] - 0s 21ms/step - loss: 0.3240 - mae: 0.4688 - val_loss: 0.0622 - val_mae: 0.2168
Epoch 8/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2398 - mae: 0.4004
13/13 [==============================] - 0s 3ms/step - loss: 0.2464 - mae: 0.4051

13/13 [==============================] - 0s 23ms/step - loss: 0.2464 - mae: 0.4051 - val_loss: 0.0528 - val_mae: 0.1986
Epoch 9/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3178 - mae: 0.5019
13/13 [==============================] - 0s 3ms/step - loss: 0.2570 - mae: 0.4048

13/13 [==============================] - 0s 18ms/step - loss: 0.2570 - mae: 0.4048 - val_loss: 0.0453 - val_mae: 0.1778
Epoch 10/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2747 - mae: 0.4069
13/13 [==============================] - 0s 3ms/step - loss: 0.2435 - mae: 0.3915

13/13 [==============================] - 0s 18ms/step - loss: 0.2435 - mae: 0.3915 - val_loss: 0.0397 - val_mae: 0.1644
Epoch 11/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4195 - mae: 0.5125
13/13 [==============================] - 0s 3ms/step - loss: 0.2604 - mae: 0.4009

13/13 [==============================] - 0s 21ms/step - loss: 0.2604 - mae: 0.4009 - val_loss: 0.0353 - val_mae: 0.1565
Epoch 12/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2572 - mae: 0.4081
13/13 [==============================] - 0s 3ms/step - loss: 0.2186 - mae: 0.3744

13/13 [==============================] - 0s 21ms/step - loss: 0.2186 - mae: 0.3744 - val_loss: 0.0318 - val_mae: 0.1483
Epoch 13/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1937 - mae: 0.3653
13/13 [==============================] - 0s 3ms/step - loss: 0.2297 - mae: 0.3868

13/13 [==============================] - 0s 19ms/step - loss: 0.2297 - mae: 0.3868 - val_loss: 0.0298 - val_mae: 0.1427
Epoch 14/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2829 - mae: 0.4560
13/13 [==============================] - 0s 3ms/step - loss: 0.2413 - mae: 0.3946

13/13 [==============================] - 0s 18ms/step - loss: 0.2413 - mae: 0.3946 - val_loss: 0.0273 - val_mae: 0.1348
Epoch 15/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1357 - mae: 0.2849
13/13 [==============================] - 0s 3ms/step - loss: 0.2346 - mae: 0.3811

13/13 [==============================] - 0s 20ms/step - loss: 0.2346 - mae: 0.3811 - val_loss: 0.0257 - val_mae: 0.1305
Epoch 16/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2117 - mae: 0.3539
13/13 [==============================] - 0s 3ms/step - loss: 0.2202 - mae: 0.3728

13/13 [==============================] - 0s 22ms/step - loss: 0.2202 - mae: 0.3728 - val_loss: 0.0245 - val_mae: 0.1270
Epoch 17/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1649 - mae: 0.3367
13/13 [==============================] - 0s 3ms/step - loss: 0.2245 - mae: 0.3778

13/13 [==============================] - 0s 19ms/step - loss: 0.2245 - mae: 0.3778 - val_loss: 0.0239 - val_mae: 0.1247
Epoch 18/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2335 - mae: 0.3685
13/13 [==============================] - 0s 3ms/step - loss: 0.2061 - mae: 0.3541

13/13 [==============================] - 0s 18ms/step - loss: 0.2061 - mae: 0.3541 - val_loss: 0.0232 - val_mae: 0.1230
Epoch 19/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1843 - mae: 0.3149
13/13 [==============================] - 0s 3ms/step - loss: 0.1824 - mae: 0.3354

13/13 [==============================] - 0s 19ms/step - loss: 0.1824 - mae: 0.3354 - val_loss: 0.0230 - val_mae: 0.1214
Epoch 20/50

 1/13 [=>............................] - ETA: 0s - loss: 0.4827 - mae: 0.5077
13/13 [==============================] - 0s 3ms/step - loss: 0.2261 - mae: 0.3686

13/13 [==============================] - 0s 22ms/step - loss: 0.2261 - mae: 0.3686 - val_loss: 0.0222 - val_mae: 0.1188
Epoch 21/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2325 - mae: 0.3831
13/13 [==============================] - 0s 3ms/step - loss: 0.2104 - mae: 0.3657

13/13 [==============================] - 0s 21ms/step - loss: 0.2104 - mae: 0.3657 - val_loss: 0.0216 - val_mae: 0.1172
Epoch 22/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3202 - mae: 0.4636
13/13 [==============================] - 0s 2ms/step - loss: 0.1966 - mae: 0.3448

13/13 [==============================] - 0s 18ms/step - loss: 0.1966 - mae: 0.3448 - val_loss: 0.0216 - val_mae: 0.1154
Epoch 23/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1320 - mae: 0.3046
13/13 [==============================] - 0s 3ms/step - loss: 0.1715 - mae: 0.3346

13/13 [==============================] - 0s 19ms/step - loss: 0.1715 - mae: 0.3346 - val_loss: 0.0219 - val_mae: 0.1142
Epoch 24/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1610 - mae: 0.3185
13/13 [==============================] - 0s 3ms/step - loss: 0.1792 - mae: 0.3292

13/13 [==============================] - 0s 23ms/step - loss: 0.1792 - mae: 0.3292 - val_loss: 0.0221 - val_mae: 0.1132
Epoch 25/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1973 - mae: 0.3711
13/13 [==============================] - 0s 3ms/step - loss: 0.1722 - mae: 0.3367

13/13 [==============================] - 0s 19ms/step - loss: 0.1722 - mae: 0.3367 - val_loss: 0.0215 - val_mae: 0.1103
Epoch 26/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1241 - mae: 0.2974
13/13 [==============================] - 0s 3ms/step - loss: 0.1860 - mae: 0.3494

13/13 [==============================] - 0s 18ms/step - loss: 0.1860 - mae: 0.3494 - val_loss: 0.0216 - val_mae: 0.1108
Epoch 27/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3221 - mae: 0.4204
13/13 [==============================] - 0s 3ms/step - loss: 0.1888 - mae: 0.3394

13/13 [==============================] - 0s 20ms/step - loss: 0.1888 - mae: 0.3394 - val_loss: 0.0220 - val_mae: 0.1111
Epoch 28/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1314 - mae: 0.2958
13/13 [==============================] - 0s 3ms/step - loss: 0.1656 - mae: 0.3282

13/13 [==============================] - 0s 22ms/step - loss: 0.1656 - mae: 0.3282 - val_loss: 0.0207 - val_mae: 0.1088
Epoch 29/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1246 - mae: 0.2845
13/13 [==============================] - 0s 3ms/step - loss: 0.1830 - mae: 0.3369

13/13 [==============================] - 0s 21ms/step - loss: 0.1830 - mae: 0.3369 - val_loss: 0.0204 - val_mae: 0.1074
Epoch 30/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1594 - mae: 0.3016
13/13 [==============================] - 0s 3ms/step - loss: 0.1575 - mae: 0.3124

13/13 [==============================] - 0s 18ms/step - loss: 0.1575 - mae: 0.3124 - val_loss: 0.0202 - val_mae: 0.1070
Epoch 31/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1690 - mae: 0.3323
13/13 [==============================] - 0s 3ms/step - loss: 0.1782 - mae: 0.3237

13/13 [==============================] - 0s 19ms/step - loss: 0.1782 - mae: 0.3237 - val_loss: 0.0198 - val_mae: 0.1052
Epoch 32/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1685 - mae: 0.3382
13/13 [==============================] - 0s 3ms/step - loss: 0.1472 - mae: 0.3052

13/13 [==============================] - 0s 20ms/step - loss: 0.1472 - mae: 0.3052 - val_loss: 0.0200 - val_mae: 0.1052
Epoch 33/50

 1/13 [=>............................] - ETA: 0s - loss: 0.2168 - mae: 0.3405
13/13 [==============================] - 0s 3ms/step - loss: 0.1944 - mae: 0.3445

13/13 [==============================] - 0s 20ms/step - loss: 0.1944 - mae: 0.3445 - val_loss: 0.0196 - val_mae: 0.1044
Epoch 34/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1937 - mae: 0.3220
13/13 [==============================] - 0s 3ms/step - loss: 0.1498 - mae: 0.3002

13/13 [==============================] - 0s 18ms/step - loss: 0.1498 - mae: 0.3002 - val_loss: 0.0195 - val_mae: 0.1038
Epoch 35/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1812 - mae: 0.3244
13/13 [==============================] - 0s 3ms/step - loss: 0.1551 - mae: 0.3062

13/13 [==============================] - 0s 19ms/step - loss: 0.1551 - mae: 0.3062 - val_loss: 0.0193 - val_mae: 0.1029
Epoch 36/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1312 - mae: 0.2931
13/13 [==============================] - 0s 3ms/step - loss: 0.1376 - mae: 0.2993

13/13 [==============================] - 0s 22ms/step - loss: 0.1376 - mae: 0.2993 - val_loss: 0.0188 - val_mae: 0.1017
Epoch 37/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1387 - mae: 0.2712
13/13 [==============================] - 0s 3ms/step - loss: 0.1477 - mae: 0.3009

13/13 [==============================] - 0s 22ms/step - loss: 0.1477 - mae: 0.3009 - val_loss: 0.0185 - val_mae: 0.1004
Epoch 38/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1213 - mae: 0.2799
13/13 [==============================] - 0s 3ms/step - loss: 0.1336 - mae: 0.2859

13/13 [==============================] - 0s 18ms/step - loss: 0.1336 - mae: 0.2859 - val_loss: 0.0184 - val_mae: 0.1012
Epoch 39/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1188 - mae: 0.2635
13/13 [==============================] - 0s 3ms/step - loss: 0.1440 - mae: 0.2918

13/13 [==============================] - 0s 18ms/step - loss: 0.1440 - mae: 0.2918 - val_loss: 0.0180 - val_mae: 0.1003
Epoch 40/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1194 - mae: 0.2794
13/13 [==============================] - 0s 3ms/step - loss: 0.1533 - mae: 0.3090

13/13 [==============================] - 0s 21ms/step - loss: 0.1533 - mae: 0.3090 - val_loss: 0.0183 - val_mae: 0.1004
Epoch 41/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1792 - mae: 0.3398
13/13 [==============================] - 0s 3ms/step - loss: 0.1653 - mae: 0.3207

13/13 [==============================] - 0s 22ms/step - loss: 0.1653 - mae: 0.3207 - val_loss: 0.0182 - val_mae: 0.1001
Epoch 42/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1206 - mae: 0.3037
13/13 [==============================] - 0s 2ms/step - loss: 0.1309 - mae: 0.2946

13/13 [==============================] - 0s 18ms/step - loss: 0.1309 - mae: 0.2946 - val_loss: 0.0179 - val_mae: 0.0997
Epoch 43/50

 1/13 [=>............................] - ETA: 0s - loss: 0.3089 - mae: 0.4394
13/13 [==============================] - 0s 3ms/step - loss: 0.1546 - mae: 0.3075

13/13 [==============================] - 0s 18ms/step - loss: 0.1546 - mae: 0.3075 - val_loss: 0.0183 - val_mae: 0.1007
Epoch 44/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1318 - mae: 0.3110
13/13 [==============================] - 0s 3ms/step - loss: 0.1481 - mae: 0.2997

13/13 [==============================] - 0s 21ms/step - loss: 0.1481 - mae: 0.2997 - val_loss: 0.0173 - val_mae: 0.0979
Epoch 45/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1183 - mae: 0.2846
13/13 [==============================] - 0s 3ms/step - loss: 0.1295 - mae: 0.2887

13/13 [==============================] - 0s 22ms/step - loss: 0.1295 - mae: 0.2887 - val_loss: 0.0166 - val_mae: 0.0958
Epoch 46/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1327 - mae: 0.2652
13/13 [==============================] - 0s 3ms/step - loss: 0.1374 - mae: 0.2912

13/13 [==============================] - 0s 18ms/step - loss: 0.1374 - mae: 0.2912 - val_loss: 0.0166 - val_mae: 0.0959
Epoch 47/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1361 - mae: 0.3015
13/13 [==============================] - 0s 2ms/step - loss: 0.1469 - mae: 0.3046

13/13 [==============================] - 0s 18ms/step - loss: 0.1469 - mae: 0.3046 - val_loss: 0.0167 - val_mae: 0.0964
Epoch 48/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1556 - mae: 0.3192
13/13 [==============================] - 0s 3ms/step - loss: 0.1328 - mae: 0.2855

13/13 [==============================] - 0s 20ms/step - loss: 0.1328 - mae: 0.2855 - val_loss: 0.0171 - val_mae: 0.0980
Epoch 49/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1307 - mae: 0.3088
13/13 [==============================] - 0s 3ms/step - loss: 0.1403 - mae: 0.2936

13/13 [==============================] - 0s 20ms/step - loss: 0.1403 - mae: 0.2936 - val_loss: 0.0166 - val_mae: 0.0964
Epoch 50/50

 1/13 [=>............................] - ETA: 0s - loss: 0.1683 - mae: 0.3174
13/13 [==============================] - 0s 3ms/step - loss: 0.1337 - mae: 0.2947

13/13 [==============================] - 0s 19ms/step - loss: 0.1337 - mae: 0.2947 - val_loss: 0.0176 - val_mae: 0.0987

Run completed: runs/2022-12-07T02-57-01Z
runs 
Data frame: 52 x 29 
# ... with 42 more rows
# ... with 24 more columns:
#   flag_nodes, flag_nodes2, flag_batch_size, flag_activation, flag_activation2,
#   flag_learning_rate, flag_epochs, flag_dropout, flag_dropout2, epochs,
#   epochs_completed, metrics, model, loss_function, optimizer, learning_rate,
#   script, start, end, completed, output, source_code, context, type

# Open the tuning run with the lowest validation loss in the tfruns viewer
# (runs is the data frame of all 52 flag combinations collected above)
view_run(runs[which.min(runs$metric_val_loss),]) 
Warning: argument 'compressed' is ignored for the internal methodWarning: incomplete final line found on 'C:\Users\arzav\AppData\Local\Temp\RtmpeOK5jP\file1b6c5ceb52c2/source/ProjectScript.R'Warning: incomplete final line found on 'C:\Users\arzav\AppData\Local\Temp\RtmpeOK5jP\file1b6c5ceb52c2/source/MLScriptAssign.R'Warning: incomplete final line found on 'C:\Users\arzav\AppData\Local\Temp\RtmpeOK5jP\file1b6c5ceb52c2/source/Script.R'
# Refit the best flag combination (32 relu -> 50 tanh, dropouts 0.2 / 0.6)
# on the combined training + validation data before final evaluation.
set.seed(1)
train1 <- rbind(new_train_trim, new_val_trim)  # stack train and validation features
trainy <- c(grad_train1y, grad_valy)           # matching response vector
set.seed(1)
model <- keras_model_sequential()              # use <- (not =) for assignment


model %>%
  # input_shape is the number of predictor columns; ncol() is clearer than dim()[2]
  layer_dense(units = 32, activation = "relu", input_shape = ncol(train1)) %>%
  layer_dropout(0.2) %>%
  layer_dense(units = 50, activation = "tanh") %>%
  layer_dropout(0.6) %>%
  layer_dense(units = 1)                       # single linear output: chance of admit

model
Model: "sequential"
__________________________________________________________________________________________
 Layer (type)                           Output Shape                        Param #       
==========================================================================================
 dense_2 (Dense)                        (None, 32)                          288           
 dropout_1 (Dropout)                    (None, 32)                          0             
 dense_1 (Dense)                        (None, 50)                          1650          
 dropout (Dropout)                      (None, 50)                          0             
 dense (Dense)                          (None, 1)                           51            
==========================================================================================
Total params: 1,989
Trainable params: 1,989
Non-trainable params: 0
__________________________________________________________________________________________
# Compile with SGD. The `lr` argument is deprecated (the session log shows
# "Warning: the `lr` argument has been renamed to `learning_rate`"), so use
# the current name; also name the optimizer argument explicitly.
set.seed(1)
model %>% compile(
  optimizer = optimizer_sgd(learning_rate = 0.01),
  loss = "mse",
  metrics = list("mae")
)
Warning: the `lr` argument has been renamed to `learning_rate`.
model 
Model: "sequential"
__________________________________________________________________________________________
 Layer (type)                           Output Shape                        Param #       
==========================================================================================
 dense_2 (Dense)                        (None, 32)                          288           
 dropout_1 (Dropout)                    (None, 32)                          0             
 dense_1 (Dense)                        (None, 50)                          1650          
 dropout (Dropout)                      (None, 50)                          0             
 dense (Dense)                          (None, 1)                           51            
==========================================================================================
Total params: 1,989
Trainable params: 1,989
Non-trainable params: 0
__________________________________________________________________________________________
# Train the final model on the combined data (no validation split here;
# the best hyperparameters were already chosen from the tuning runs).
set.seed(111)
model %>% fit(
  x = as.matrix(train1),
  y = trainy,
  epochs = 50,
  batch_size = 30
)
Epoch 1/50

 1/14 [=>............................] - ETA: 8s - loss: 1.2126 - mae: 0.9645
14/14 [==============================] - 1s 3ms/step - loss: 0.6349 - mae: 0.6460

14/14 [==============================] - 1s 44ms/step - loss: 0.6349 - mae: 0.6460
Epoch 2/50

 1/14 [=>............................] - ETA: 0s - loss: 0.3648 - mae: 0.5059
14/14 [==============================] - 0s 4ms/step - loss: 0.3551 - mae: 0.4685

14/14 [==============================] - 0s 30ms/step - loss: 0.3551 - mae: 0.4685
Epoch 3/50

 1/14 [=>............................] - ETA: 0s - loss: 0.1829 - mae: 0.3184
14/14 [==============================] - 0s 3ms/step - loss: 0.2755 - mae: 0.4189

14/14 [==============================] - 0s 14ms/step - loss: 0.2755 - mae: 0.4189
Epoch 4/50

 1/14 [=>............................] - ETA: 0s - loss: 0.2586 - mae: 0.3737
14/14 [==============================] - 0s 3ms/step - loss: 0.2582 - mae: 0.3869

14/14 [==============================] - 0s 19ms/step - loss: 0.2582 - mae: 0.3869
Epoch 5/50

 1/14 [=>............................] - ETA: 0s - loss: 0.1479 - mae: 0.3108
14/14 [==============================] - 0s 3ms/step - loss: 0.2229 - mae: 0.3657

14/14 [==============================] - 0s 23ms/step - loss: 0.2229 - mae: 0.3657
Epoch 6/50

 1/14 [=>............................] - ETA: 0s - loss: 0.2438 - mae: 0.4098
14/14 [==============================] - 0s 4ms/step - loss: 0.1690 - mae: 0.3114

14/14 [==============================] - 0s 22ms/step - loss: 0.1690 - mae: 0.3114
Epoch 7/50

 1/14 [=>............................] - ETA: 0s - loss: 0.2838 - mae: 0.4286
14/14 [==============================] - 0s 3ms/step - loss: 0.1552 - mae: 0.3123

14/14 [==============================] - 0s 19ms/step - loss: 0.1552 - mae: 0.3123
Epoch 8/50

 1/14 [=>............................] - ETA: 0s - loss: 0.2352 - mae: 0.3745
14/14 [==============================] - 0s 3ms/step - loss: 0.1344 - mae: 0.2867

14/14 [==============================] - 0s 18ms/step - loss: 0.1344 - mae: 0.2867
Epoch 9/50

 1/14 [=>............................] - ETA: 0s - loss: 0.1261 - mae: 0.2772
14/14 [==============================] - 0s 3ms/step - loss: 0.1345 - mae: 0.2841

14/14 [==============================] - 0s 24ms/step - loss: 0.1345 - mae: 0.2841
Epoch 10/50

 1/14 [=>............................] - ETA: 0s - loss: 0.1602 - mae: 0.2899
14/14 [==============================] - 0s 3ms/step - loss: 0.1210 - mae: 0.2667

14/14 [==============================] - 0s 24ms/step - loss: 0.1210 - mae: 0.2667
Epoch 11/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0885 - mae: 0.2366
14/14 [==============================] - 0s 3ms/step - loss: 0.0899 - mae: 0.2374

14/14 [==============================] - 0s 22ms/step - loss: 0.0899 - mae: 0.2374
Epoch 12/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0684 - mae: 0.2227
14/14 [==============================] - 0s 3ms/step - loss: 0.0771 - mae: 0.2214

14/14 [==============================] - 0s 19ms/step - loss: 0.0771 - mae: 0.2214
Epoch 13/50

 1/14 [=>............................] - ETA: 0s - loss: 0.1163 - mae: 0.2699
14/14 [==============================] - 0s 3ms/step - loss: 0.0906 - mae: 0.2378

14/14 [==============================] - 0s 22ms/step - loss: 0.0906 - mae: 0.2378
Epoch 14/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0965 - mae: 0.2470
14/14 [==============================] - 0s 3ms/step - loss: 0.0847 - mae: 0.2261

14/14 [==============================] - 0s 20ms/step - loss: 0.0847 - mae: 0.2261
Epoch 15/50

 1/14 [=>............................] - ETA: 0s - loss: 0.1032 - mae: 0.2835
14/14 [==============================] - 0s 3ms/step - loss: 0.0799 - mae: 0.2201

14/14 [==============================] - 0s 19ms/step - loss: 0.0799 - mae: 0.2201
Epoch 16/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0367 - mae: 0.1439
14/14 [==============================] - 0s 3ms/step - loss: 0.0665 - mae: 0.2036

14/14 [==============================] - 0s 20ms/step - loss: 0.0665 - mae: 0.2036
Epoch 17/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0487 - mae: 0.1834
14/14 [==============================] - 0s 4ms/step - loss: 0.0634 - mae: 0.2001

14/14 [==============================] - 0s 23ms/step - loss: 0.0634 - mae: 0.2001
Epoch 18/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0706 - mae: 0.2150
14/14 [==============================] - 0s 3ms/step - loss: 0.0547 - mae: 0.1817

14/14 [==============================] - 0s 20ms/step - loss: 0.0547 - mae: 0.1817
Epoch 19/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0536 - mae: 0.1878
14/14 [==============================] - 0s 3ms/step - loss: 0.0552 - mae: 0.1825

14/14 [==============================] - 0s 20ms/step - loss: 0.0552 - mae: 0.1825
Epoch 20/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0571 - mae: 0.1905
14/14 [==============================] - 0s 3ms/step - loss: 0.0523 - mae: 0.1788

14/14 [==============================] - 0s 20ms/step - loss: 0.0523 - mae: 0.1788
Epoch 21/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0468 - mae: 0.1667
14/14 [==============================] - 0s 3ms/step - loss: 0.0444 - mae: 0.1718

14/14 [==============================] - 0s 19ms/step - loss: 0.0444 - mae: 0.1718
Epoch 22/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0513 - mae: 0.1860
14/14 [==============================] - 0s 3ms/step - loss: 0.0485 - mae: 0.1750

14/14 [==============================] - 0s 22ms/step - loss: 0.0485 - mae: 0.1750
Epoch 23/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0367 - mae: 0.1496
14/14 [==============================] - 0s 3ms/step - loss: 0.0425 - mae: 0.1639

14/14 [==============================] - 0s 20ms/step - loss: 0.0425 - mae: 0.1639
Epoch 24/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0358 - mae: 0.1391
14/14 [==============================] - 0s 2ms/step - loss: 0.0411 - mae: 0.1580

14/14 [==============================] - 0s 18ms/step - loss: 0.0411 - mae: 0.1580
Epoch 25/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0449 - mae: 0.1833
14/14 [==============================] - 0s 4ms/step - loss: 0.0385 - mae: 0.1552

14/14 [==============================] - 0s 21ms/step - loss: 0.0385 - mae: 0.1552
Epoch 26/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0271 - mae: 0.1318
14/14 [==============================] - 0s 3ms/step - loss: 0.0360 - mae: 0.1532

14/14 [==============================] - 0s 20ms/step - loss: 0.0360 - mae: 0.1532
Epoch 27/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0414 - mae: 0.1633
14/14 [==============================] - 0s 3ms/step - loss: 0.0335 - mae: 0.1461

14/14 [==============================] - 0s 20ms/step - loss: 0.0335 - mae: 0.1461
Epoch 28/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0273 - mae: 0.1294
14/14 [==============================] - 0s 3ms/step - loss: 0.0278 - mae: 0.1316

14/14 [==============================] - 0s 20ms/step - loss: 0.0278 - mae: 0.1316
Epoch 29/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0145 - mae: 0.0921
14/14 [==============================] - 0s 3ms/step - loss: 0.0280 - mae: 0.1303

14/14 [==============================] - 0s 19ms/step - loss: 0.0280 - mae: 0.1303
Epoch 30/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0303 - mae: 0.1349
14/14 [==============================] - 0s 3ms/step - loss: 0.0283 - mae: 0.1332

14/14 [==============================] - 0s 21ms/step - loss: 0.0283 - mae: 0.1332
Epoch 31/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0207 - mae: 0.1236
14/14 [==============================] - 0s 3ms/step - loss: 0.0260 - mae: 0.1282

14/14 [==============================] - 0s 20ms/step - loss: 0.0260 - mae: 0.1282
Epoch 32/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0242 - mae: 0.0989
14/14 [==============================] - 0s 3ms/step - loss: 0.0230 - mae: 0.1179

14/14 [==============================] - 0s 21ms/step - loss: 0.0230 - mae: 0.1179
Epoch 33/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0246 - mae: 0.1232
14/14 [==============================] - 0s 3ms/step - loss: 0.0254 - mae: 0.1273

14/14 [==============================] - 0s 21ms/step - loss: 0.0254 - mae: 0.1273
Epoch 34/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0291 - mae: 0.1465
14/14 [==============================] - 0s 3ms/step - loss: 0.0249 - mae: 0.1262

14/14 [==============================] - 0s 19ms/step - loss: 0.0249 - mae: 0.1262
Epoch 35/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0213 - mae: 0.1196
14/14 [==============================] - 0s 3ms/step - loss: 0.0226 - mae: 0.1215

14/14 [==============================] - 0s 22ms/step - loss: 0.0226 - mae: 0.1215
Epoch 36/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0311 - mae: 0.1296
14/14 [==============================] - 0s 3ms/step - loss: 0.0215 - mae: 0.1167

14/14 [==============================] - 0s 17ms/step - loss: 0.0215 - mae: 0.1167
Epoch 37/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0268 - mae: 0.1289
14/14 [==============================] - 0s 3ms/step - loss: 0.0234 - mae: 0.1177

14/14 [==============================] - 0s 23ms/step - loss: 0.0234 - mae: 0.1177
Epoch 38/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0323 - mae: 0.1523
14/14 [==============================] - 0s 3ms/step - loss: 0.0199 - mae: 0.1120

14/14 [==============================] - 0s 23ms/step - loss: 0.0199 - mae: 0.1120
Epoch 39/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0129 - mae: 0.0946
14/14 [==============================] - 0s 3ms/step - loss: 0.0199 - mae: 0.1129

14/14 [==============================] - 0s 18ms/step - loss: 0.0199 - mae: 0.1129
Epoch 40/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0202 - mae: 0.1135
14/14 [==============================] - 0s 3ms/step - loss: 0.0208 - mae: 0.1160

14/14 [==============================] - 0s 22ms/step - loss: 0.0208 - mae: 0.1160
Epoch 41/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0163 - mae: 0.0975
14/14 [==============================] - 0s 4ms/step - loss: 0.0185 - mae: 0.1083

14/14 [==============================] - 0s 21ms/step - loss: 0.0185 - mae: 0.1083
Epoch 42/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0192 - mae: 0.1213
12/14 [========================>.....] - ETA: 0s - loss: 0.0156 - mae: 0.0996
14/14 [==============================] - 0s 5ms/step - loss: 0.0158 - mae: 0.1001

14/14 [==============================] - 0s 24ms/step - loss: 0.0158 - mae: 0.1001
Epoch 43/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0168 - mae: 0.1048
14/14 [==============================] - 0s 3ms/step - loss: 0.0174 - mae: 0.1027

14/14 [==============================] - 0s 23ms/step - loss: 0.0174 - mae: 0.1027
Epoch 44/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0237 - mae: 0.1169
14/14 [==============================] - 0s 3ms/step - loss: 0.0195 - mae: 0.1103

14/14 [==============================] - 0s 20ms/step - loss: 0.0195 - mae: 0.1103
Epoch 45/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0221 - mae: 0.1232
14/14 [==============================] - 0s 3ms/step - loss: 0.0191 - mae: 0.1101

14/14 [==============================] - 0s 19ms/step - loss: 0.0191 - mae: 0.1101
Epoch 46/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0271 - mae: 0.1255
14/14 [==============================] - 0s 3ms/step - loss: 0.0156 - mae: 0.0971

14/14 [==============================] - 0s 19ms/step - loss: 0.0156 - mae: 0.0971
Epoch 47/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0182 - mae: 0.1073
14/14 [==============================] - 0s 3ms/step - loss: 0.0166 - mae: 0.1014

14/14 [==============================] - 0s 23ms/step - loss: 0.0166 - mae: 0.1014
Epoch 48/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0169 - mae: 0.1029
14/14 [==============================] - 0s 3ms/step - loss: 0.0147 - mae: 0.0964

14/14 [==============================] - 0s 21ms/step - loss: 0.0147 - mae: 0.0964
Epoch 49/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0123 - mae: 0.0806
14/14 [==============================] - 0s 3ms/step - loss: 0.0129 - mae: 0.0918

14/14 [==============================] - 0s 19ms/step - loss: 0.0129 - mae: 0.0918
Epoch 50/50

 1/14 [=>............................] - ETA: 0s - loss: 0.0175 - mae: 0.1107
14/14 [==============================] - 0s 3ms/step - loss: 0.0141 - mae: 0.0937

14/14 [==============================] - 0s 21ms/step - loss: 0.0141 - mae: 0.0937
pred_neural<-as.numeric(model %>% predict(as.matrix(new_test_trim))) 

1/4 [======>.......................] - ETA: 0s
4/4 [==============================] - 0s 2ms/step

4/4 [==============================] - 0s 2ms/step
pred_neural
 [1] 0.6971325 0.6336560 0.6467903 0.6305943 0.9291342 0.5163135 0.9379412 0.8544554
 [9] 0.6266875 0.8200643 0.8278280 0.9211281 0.7803812 0.6792844 0.6394218 0.5480188
[17] 0.7697498 0.9188482 0.6540645 0.5444588 0.7390270 0.8855432 0.7568695 0.5662756
[25] 0.6289923 0.7390184 0.7195928 0.9125524 0.8658014 0.7260489 0.5630175 0.5456111
[33] 0.8062238 0.6973618 0.9162984 0.8512872 0.6421623 0.7076024 0.9519051 0.9032892
[41] 0.9002507 0.8029948 0.6085030 0.5586514 0.6774643 0.6709719 0.8105849 0.6394858
[49] 0.8776145 0.5464195 0.6782404 0.7336605 0.7779120 0.6586727 0.5794246 0.6755283
[57] 0.8916051 0.7658147 0.6703614 0.6892182 0.6873726 0.6710893 0.7639762 0.6007479
[65] 0.5705675 0.7761030 0.5331783 0.6835363 0.8183812 0.7959156 0.7887239 0.6579330
[73] 0.7268230 0.5001137 0.5252127 0.5943190 0.5734532 0.7577112 0.7036553 0.5711360
[81] 0.6132168 0.6134114 0.6821070 0.6822758 0.7411295 0.6268914 0.9160996 0.8868576
[89] 0.6550977 0.6529654 0.8619550 0.8289328 0.7051621 0.5030243 0.6251431 0.6447548
[97] 0.6335581 0.6256747
RMSE(pred_neural,grad_test1y) 
[1] 0.06585196

Comparing the RMSE values of all the models, Random Forest has the lowest RMSE at ~0.0624, so it performs best on this dataset. (All reported RMSEs are below 0.1; a value of 0.624 would be impossible given the target's 0.34-0.97 range.)

---
title: "Graduate Admission"
output:
  html_document:
    df_print: paged
---

# Data reading and cleaning.

```{r}
# Load the graduate-admissions dataset (500 rows x 9 columns).
# NOTE(review): hard-coded absolute Windows path — not portable; consider a
# relative path (or a project-root helper) so the notebook runs elsewhere.
grad_set <- read.csv(file = "C:/Users/arzav/Downloads/Admission_Predict_Ver1.1.csv", header = TRUE, stringsAsFactors=FALSE) 
grad_set
```



```{r}
str(grad_set)

```
The dataset consists of 500 observations and 9 variables. 

It has 9 numerical variables. 

```{r}
summary(grad_set)
```
University.Rating ranges from 1 to 5 (see the summary above).
LOR (letter of recommendation strength) ranges from 1 to 5.
SOP (statement of purpose strength) ranges from 1 to 5.
Research is either 0 or 1, indicating whether the candidate has undergraduate research experience.

```{r}
colSums(is.na(grad_set))
```
Since 'Serial.No.' is only a row identifier and is not useful for the models, we will remove this column.


```{r}
# Drop the row-identifier column; it carries no predictive information.
grad_set[["Serial.No."]] <- NULL

```


```{r}
str(grad_set) 
```


```{r}
colSums(is.na(grad_set)) 
```


```{r}
grad_set$Research<-as.factor(grad_set$Research) 
```

Research is converted into factor variable

```{r}
table(grad_set$Research) 

```


```{r}
str(grad_set) 
```
# Exploratory Data analysis.

```{r}
library(ggplot2)

# Distribution of the target variable, with bars shaded by bin count.
ggplot(grad_set, aes(x = Chance.of.Admit, fill = after_stat(count))) +
  geom_histogram() +
  ggtitle("Chance.of.Admit") +
  ylab("frequency") +
  xlab("Chance.of.Admit") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme_minimal()
```
The tallest bars in the histogram reach a frequency of about 40. Two bars share this maximum height: one lies between 0.6 and 0.7, and the other between 0.7 and 0.8.


```{r}
# Pairwise Pearson correlations among the numeric predictors and the target.
numeric_cols <- c("GRE.Score", "TOEFL.Score", "University.Rating",
                  "SOP", "LOR", "CGPA", "Chance.of.Admit")
corr <- cor(grad_set[, numeric_cols])
corr

```


```{r}
library(ggcorrplot)
ggcorrplot(corr, hc.order = TRUE, type = "lower", lab = TRUE, lab_size = 3, method="circle", colors = c("blue", "white", "red"), outline.color = "gray", show.legend = TRUE, show.diag = FALSE, title="College variables") 
```


```{r}
cor.test(grad_set$Chance.of.Admit, grad_set$GRE.Score)
cor.test(grad_set$Chance.of.Admit, grad_set$TOEFL.Score)
cor.test(grad_set$Chance.of.Admit, grad_set$University.Rating)
cor.test(grad_set$Chance.of.Admit, grad_set$SOP)
cor.test(grad_set$Chance.of.Admit, grad_set$LOR)
cor.test(grad_set$Chance.of.Admit, grad_set$CGPA) 
```
From the above plots and tests we can see that all the numeric variables are strongly associated with the target variable.

```{r}
Plot1 = ggplot(grad_set, aes(x = Chance.of.Admit, y = Research)) + geom_boxplot() 
Plot1 

```



```{r}
t.test(Chance.of.Admit~Research,data=grad_set) 
```
From the above plots and test we can conclude that Research is closely associated with Chance.of.Admit

```{r}
ggplot(grad_set, aes(Chance.of.Admit, color=factor(Research)))+
  geom_density(alpha=0.5)+ggtitle("Chance of admit vs Research Distribution") 
```


# Splitting and Training the data.

Before we can develop the model, we must divide the data into train and test datasets. We will use the train dataset to develop a linear regression model, and the test dataset as a comparison to check if the model becomes overfit or cannot predict fresh data. We will utilize 80% of the data as training data and the remaining 20% as testing data.

```{r}
set.seed(1)

library(lattice)
library(caret)

# 80/20 split, stratified on the target variable.
train.index <- createDataPartition(grad_set$Chance.of.Admit, p = 0.8, list = FALSE)
grad_train <- grad_set[train.index, ]
grad_test  <- grad_set[-train.index, ]

# BUG FIX: the original re-indexed the *already subset* data frames with
# train.index again (grad_train[train.index, 8] / grad_test[-train.index, 8]),
# which selects the wrong rows and pads with NAs. The labels are simply
# column 8 (Chance.of.Admit) of each split.
grad_train_labels <- grad_train[, 8]
grad_test_labels  <- grad_test[, 8]

```


```{r}
grad_train 
grad_test
```

# Multiple Linear Regression.

```{r}
set.seed(1)

# Multiple linear regression on all predictors, assessed with 5-fold CV.
train.control <- trainControl(method = "cv", number = 5)
linear_model <- train(
  Chance.of.Admit ~ .,
  data = grad_train,
  method = "lm",
  trControl = train.control
)
linear_model

```


```{r}
summary(linear_model) 
```
Linear regression is a model with a great interpretability, thus we'll interpret this simple linear regression model immediately. - Intercept-based:

Based on the coefficient or slope: When CGPA increases by one value, the Chance.of.Admit increases by around 0.12.

According to the P-value, CGPA is a significant predictor with a linear impact.

Based on R-squared values of 0.8244, the predictor chosen is enough to explain the target variable.

```{r}
grad_pred<-predict(linear_model, grad_test) 
grad_pred

```


```{r}
RMSE(grad_pred,grad_test$Chance.of.Admit) 
```

# Backward selection method

We can use stepwise regression to find the combination of predictors that produces the best model, judged by the lowest RMSE. There are three types of stepwise regression: forward, backward, and both. We will use `leapBackward` and name the result `step_model`.

```{r}
# Backward stepwise selection with 10-fold CV, trying subset sizes 1-7.
train.control2 <- trainControl(method = "cv", number = 10)
step_model <- train(
  Chance.of.Admit ~ .,
  data = grad_train,
  method = "leapBackward",
  trControl = train.control2,
  tuneGrid = data.frame(nvmax = 1:7)
)
step_model
```



```{r}
summary(step_model$finalModel)
 
```


```{r}
stepwise_grad_pred<-predict(step_model,grad_test)
stepwise_grad_pred 

```


```{r}
RMSE(stepwise_grad_pred,grad_test$Chance.of.Admit) 
```


# Regression Trees

```{r}
library(rpart) 
reg_tree_data <- rpart(Chance.of.Admit ~ ., data = grad_train)
reg_tree_data

```


```{r}
best<-reg_tree_data$cptable[which.min(reg_tree_data$cptable[,"xerror"]),"CP"] 
```


```{r}
pruned_tree<-prune(reg_tree_data,cp=best) 
```


```{r}
library(rpart.plot)
prp(pruned_tree) 
```


```{r}
regtree_pred<-predict(reg_tree_data,grad_test)
regtree_pred 

```


```{r}
RMSE(regtree_pred,grad_test$Chance.of.Admit) 

```



```{r}
set.seed(1)

# LASSO (alpha = 1) over a log-spaced lambda grid, tuned with 10-fold CV.
# Preprocessing: kNN imputation and removal of near-zero-variance columns.
lambda_grid <- 10^seq(3, -3, length = 100)
lasso <- train(
  Chance.of.Admit ~ .,
  data = grad_train,
  method = "glmnet",
  trControl = trainControl("cv", number = 10),
  preProcess = c("knnImpute", "nzv"),
  tuneGrid = expand.grid(alpha = 1, lambda = lambda_grid)
)

```


```{r}
lasso 

```


```{r}
predic_lasso <- predict(lasso,grad_test) 
predic_lasso
```


```{r}
RMSE(predic_lasso, grad_test$Chance.of.Admit) 

```


```{r}
coef(lasso$finalModel, lasso$bestTune$lambda) 

```
Here we can see that only one variable has been shrunk to zero, namely SOP.

```{r}
set.seed(1)
ridge <- train(
Chance.of.Admit ~., data = grad_train, method = "glmnet",
trControl = trainControl("cv", number = 5),
na.action = na.pass, 
preProcess=c("knnImpute","nzv"),
tuneGrid = expand.grid(alpha = 0, lambda = 10^seq(-3, 3, length = 
100)))

```

```{r}
ridge 
```


```{r}
predict_ridge <- predict(ridge,grad_test) 
predict_ridge
```



```{r}
RMSE(predict_ridge,grad_test$Chance.of.Admit) 

```


```{r}
set.seed(1)
enet <- train(
Chance.of.Admit~., data = grad_train, method = "glmnet", 
trControl = trainControl("cv", number = 10),
preProcess=c("knnImpute","nzv"),
tuneGrid = expand.grid(alpha =seq(0,1, length=10), lambda = 10^seq(-
3, 3, length = 100)))

```

```{r}
enet 
```


```{r}
pred_elast<-predict(enet,grad_test)
pred_elast 

```


```{r}
RMSE(pred_elast,grad_test$Chance.of.Admit) 

```



```{r}
set.seed(1)

# Random forest with 10-fold CV, tuning mtry over {2, 4, 8}.
grad_rf <- train(
  Chance.of.Admit ~ .,
  data = grad_train,
  method = "rf",
  trControl = trainControl(method = "cv", number = 10),
  preProcess = c("knnImpute", "nzv"),
  tuneGrid = expand.grid(mtry = c(2, 4, 8))
)

```


```{r}
grad_rf 

```

```{r}
pred_forest<-predict(grad_rf,grad_test)
pred_forest 
```

```{r}
RMSE(pred_forest, grad_test$Chance.of.Admit) 
```

```{r}
varImp(grad_rf)
```
We can see that six variables carry most of the importance; among them, CGPA is the most important.


```{r}
set.seed(1)

gbm <- train(
Chance.of.Admit ~., data = grad_train, method = "gbm",na.action = na.pass,
trControl = trainControl("cv", number = 10))
```
```{r}
predictions_gradiant=predict(gbm, grad_test)
predictions_gradiant
```

```{r}
RMSE(predictions_gradiant,grad_test$Chance.of.Admit)
```


```{r}
set.seed(1)

svmln <- train( 
Chance.of.Admit ~., data = grad_train, method = "svmLinear",
preProcess=c("knnImpute","nzv"),
trControl = trainControl("cv", number = 10))


```


```{r}
svmln
 
```


```{r}
predict_svm1=predict(svmln, grad_test ) 
predict_svm1

```


```{r}
RMSE(predict_svm1,grad_test$Chance.of.Admit) 

```


```{r}
set.seed(1)

svmr <- train(
Chance.of.Admit ~., data = grad_train, method = "svmRadial",
preProcess=c("knnImpute","nzv"),
trControl = trainControl("cv", number = 10))
 

```

```{r}
svmr
```


```{r}
predict_svmrad<-predict(svmr,grad_test)
predict_svmrad 

```


```{r}
RMSE(predict_svmrad,grad_test$Chance.of.Admit)
 
```

```{r}
compare=resamples(list(Ran=grad_rf,G=gbm,SL=svmln,SR=svmr))
summary(compare)
```


```{r}
set.seed(1)

inTrain = createDataPartition(grad_train$Chance.of.Admit, p=0.9, list=FALSE)
grad_90_train = grad_train[inTrain,]
grad_val = grad_train[-inTrain,] 

```


```{r}
str(grad_90_train) 
```



```{r}
set.seed(1)

# Separate predictors (columns 1-7) from the target (column 8,
# Chance.of.Admit) for the 90% training, test, and validation sets.
grad_train1x<-grad_90_train[,-8] 
grad_train1y<-grad_90_train[,8]
grad_test1x<-grad_test[,-8]
grad_test1y<-grad_test[,8]
grad_valx<-grad_val[,-8]
grad_valy<-grad_val[,8]

```
```{r}
set.seed(1)
preproc <- preProcess(grad_train1x, method="knnImpute") 

train.imputed <- predict(preproc, grad_train1x)
test.imputed <- predict(preproc, grad_test1x)  
val_imputed <- predict(preproc, grad_valx)
```


```{r}
new_train<-train.imputed
new_test<-test.imputed 
new_val<-val_imputed
new_train
new_test
new_val
```


```{r}
set.seed(1)
library(mltools)
library(data.table) 
new_train_trim<-one_hot(data.table(new_train), cols = "auto", sparsifyNAs = FALSE, naCols = FALSE,dropCols = TRUE, dropUnusedLevels = FALSE)

new_test_trim<-one_hot(data.table(new_test), cols = "auto", sparsifyNAs = FALSE, naCols = FALSE,dropCols = TRUE, dropUnusedLevels = FALSE)

new_val_trim<-one_hot(data.table(new_val), cols = "auto", sparsifyNAs = FALSE, naCols = FALSE,dropCols = TRUE, dropUnusedLevels = FALSE)
```

```{r}
new_train_trim<-as.data.frame(new_train_trim)
new_test_trim<-as.data.frame(new_test_trim)
new_val_trim<-as.data.frame(new_val_trim) 
new_train_trim
new_test_trim
new_val_trim
```



```{r}
library(keras)
library(tensorflow)
library(caret) 
set.seed(1)

model <- keras_model_sequential() 

model %>%
  layer_dense(units=128, activation="relu", input_shape=dim(new_train_trim)[2])%>%
  layer_dropout(0.2)%>%
  layer_dense(units=128, activation="relu")%>%
  layer_dropout(0.2) %>%
  layer_dense(units=1)

model

```
```{r}
set.seed(1)
model %>% compile(
optimizer = "sgd", 
loss = 'mse', 
metrics = list("mae"))

model
```



```{r}
set.seed(111)
# Train for 30 epochs with batches of 100, monitoring the held-out
# validation split each epoch.
# NOTE(review): set.seed() only seeds R's RNG; it does NOT make Keras
# weight initialization or shuffling reproducible — consider
# tensorflow::set_random_seed() for full reproducibility.
model %>% fit(
as.matrix(new_train_trim), grad_train1y, epochs = 30,  
batch_size=100, validation_data=list(as.matrix(new_val_trim),
grad_valy )) 

model

```


```{r}
set.seed(1)
model %>% evaluate(as.matrix(new_train_trim), grad_train1y) 

```


```{r}
set.seed(1)
library(tfruns)
runs <- tuning_run("~/ProjectScript.R", 
                   flags = list( 
                     nodes = c(16, 32, 64),
                     nodes2 = c(50, 32, 10),
                     learning_rate = c(0.01,  0.001),
                     batch_size = c(30, 50),
                     epochs = c(30, 50),
                     activation = c("relu", "sigmoid", "tanh"),
                     activation2 = c("relu", "sigmoid", "tanh"),
                     dropout = c(0.2, 0.6),
                     dropout2 = c(0.2, 0.6)
                   ), sample = 0.02
                   )

```

```{r}
runs 

```


```{r}

view_run(runs[which.min(runs$metric_val_loss),]) 

```


```{r}
set.seed(1)
train1<-rbind(new_train_trim,new_val_trim) 
trainy<-c(grad_train1y,grad_valy)

```


```{r}
set.seed(1)
model = keras_model_sequential()
 

model %>%
  layer_dense(units=32, activation="relu", input_shape=dim(train1)[2])%>%
  layer_dropout(0.2)%>%
  layer_dense(units=50, activation="tanh")%>%
  layer_dropout(0.6) %>%
  layer_dense(units=1)

model

```


```{r}
set.seed(1)
# FIX: `lr` is deprecated in the keras optimizer API; the supported
# argument name is `learning_rate` (same value, identical behavior).
model %>% compile(
  optimizer = optimizer_sgd(learning_rate = 0.01),
  loss = "mse",
  metrics = list("mae")
)

```

```{r}
model 
```


```{r}
set.seed(111)
model %>%fit(as.matrix(train1),trainy,epochs=50,batch_size=30) 

```


```{r}
pred_neural<-as.numeric(model %>% predict(as.matrix(new_test_trim))) 
pred_neural
```


```{r}
RMSE(pred_neural,grad_test1y) 

```

Comparing the RMSE values of all the models, Random Forest has the lowest RMSE at ~0.0624, so it performs best on this dataset.




